| query (stringlengths 9-3.4k) | document (stringlengths 9-87.4k) | metadata (dict) | negatives (sequencelengths 4-101) | negative_scores (sequencelengths 4-101) | document_score (stringlengths 3-10) | document_rank (stringclasses, 102 values) |
---|---|---|---|---|---|---|
Ensure admin can't credit negative tickets to a user | def test_credit_ticket_negative_int(self):
user = UserFactory()
self.assertEqual(user.tickets, 1)
nb_tickets_to_add = -5
data = {
'nb_tickets': nb_tickets_to_add,
}
self.client.force_authenticate(user=self.admin)
response = self.client.post(
reverse(
'user-credit-tickets',
kwargs={'pk': user.id},
),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_400_BAD_REQUEST,
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_credit_ticket_as_admin(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_200_OK,\n )\n\n self.assertEqual(\n User.objects.get(pk=user.id).tickets,\n 1 + nb_tickets_to_add\n )",
"def test_credit_ticket_as_user(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.user)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_403_FORBIDDEN,\n )",
"def test_credit_ticket_not_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 'this is not an int'\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )",
"def test_negative_conditions(self):\r\n outline_url = reverse_course_url('course_handler', self.course.id)\r\n # register a non-staff member and try to delete the course branch\r\n non_staff_client, _ = self.create_non_staff_authed_user_client()\r\n response = non_staff_client.delete(outline_url, {}, HTTP_ACCEPT='application/json')\r\n self.assertEqual(response.status_code, 403)",
"def test_not_permitted(self):\r\n test_user_client, test_user = self.create_non_staff_authed_user_client()\r\n CourseEnrollment.enroll(test_user, self.course.id)\r\n response = test_user_client.get(self.orphan_url)\r\n self.assertEqual(response.status_code, 403)\r\n response = test_user_client.delete(self.orphan_url)\r\n self.assertEqual(response.status_code, 403)",
"def allowed(self, user, amount):\n return True",
"async def ticket_add(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n\n if user.id in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is already added.\")\n return\n\n adding_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if adding_is_admin:\n await ctx.send(\"You cannot add a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n return\n\n try:\n await channel.set_permissions(user, send_messages=True, read_messages=True)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].append(user.id)\n\n await ctx.send(f\"{user.mention} has been added to the ticket.\")",
"def raise_not_editable(self, viewer):\n if not self.id or viewer.has_perm(\"bookwyrm.create_invites\"):\n return\n raise PermissionDenied()",
"async def ticket_remove(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n if user.id not in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is not added.\")\n return\n\n removing_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if removing_is_admin:\n await ctx.send(\"You cannot remove a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n\n try:\n await channel.set_permissions(user, send_messages=False, read_messages=False)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].remove(user.id)\n\n await ctx.send(f\"{user.mention} has been removed from the ticket.\")",
"def cant(user, action):\n\n return not can(user, action)",
"def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()",
"def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False",
"def disallow_handler(update, _):\n global TEMPORARILY_ALLOWED\n user_id = update.message.chat.id\n if user_id == ADMIN_ID:\n TEMPORARILY_ALLOWED = False\n update.message.reply_text(\"Temprarily allowed disabled!\")",
"def get_everyone_denied(self):",
"def test_can_not_cancel_past_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0) - timedelta(hours=1)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'cancel': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def write_authorize(cls, user, obj):\n if not obj.delivery.deadline.assignment_group.is_examiner(user):\n raise PermissionDenied()",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_deny_pending_payment(self):\n pass",
"def check_admin():\n\tif not current_user.is_admin:\n\t\tabort(403)",
"def check_admin():\r\n if not current_user.is_admin:\r\n abort(403)",
"def no_reason(message, db):\n #message.reply(Strings['GRANT_EXAMPLE'].format(db))\n try:\n hf.grant(message, db.lower(), \"[EXTENDING ACCESS TIME]\", False)\n except Exception as e:\n message._client.send_message(errors_channel, \"```{}```\".format(e))",
"def test_buyTicket_NotForSale():\n old_venue_balance = testVenue.wallet\n assert not testUser2.buyTicket(testTicket2)\n assert testTicket2 not in testUser2.inventory\n assert not testTicket1.for_sale\n assert testUser2.wallet == 500\n assert testVenue.wallet == old_venue_balance",
"def test_can_not_cancel_current_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'cancel': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def temporarily_allow_handler(update, _):\n global TEMPORARILY_ALLOWED\n user_id = update.message.chat.id\n if user_id == ADMIN_ID:\n TEMPORARILY_ALLOWED = True\n update.message.reply_text(\"Temprarily allowed!\")",
"def no_reason(message, db):\n #message.reply(Strings['GRANT_EXAMPLE'].format(db))\n try:\n hf.grant(message, db.lower(), \"[EXTENDING ACCESS TIME]\", True)\n except Exception as e:\n message._client.send_message(errors_channel, \"```{}```\".format(e))",
"def admin_reject(user):\n if user.comments in (None or \"\"):\n return\n\n subject = \"ECE/CIS Account - Account Application rejected for %s\" % user.username\n application = \"https://www.eecis.udel.edu/NewAccount/\"\n helprequest = \"https://www.eecis.udel.edu/service\"\n sponsor = \"%[email protected]\" % user.sponsor\n \n message = \"Your ECE/CIS Account has been rejected by ECE/CIS faculty adminstrators.\\n\" % user.sponsor\n message += \"The reason given for rejection was:\\n\\n%s\\n\\n\" % user.comments\n message += \"You may re-apply with corrected information at %s\\n\" % application\n message += \"Please don't reply to this email. If have any questions, please \\n\"\n message += \"please post a ticket as an outsider at %s\" % helprequest\n message += \"-- ECE\\CIS Labstaff\"\n\n\n send('[email protected]', 'ECE/CIS Account System', \\\n [user.email, sponsor], subject, message, MAILHOST)",
"def check_admin():\n if not current_user.is_admin:\n abort(403)",
"def check_admin():\n if not current_user.is_admin:\n abort(403)",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"DELETE\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_noTicket():\n assert testUser1.buyTicket(None) == False",
"def test_user_can_change_not_author(self):\n self.assertFalse(self.story.user_can_change(self.user2))",
"def test_validate_ticket_no_ticket(self):\n with self.assertRaises(InvalidRequest):\n ProxyGrantingTicket.objects.validate_ticket(None, 'https://www.example.com')",
"def avoid_lockouts():\n db = get_db()\n if db.count_admins()[0][0] <= 2:\n session[\"last_error\"] = \"There must always be at least two administrators.\"\n return False\n return True",
"def can_edit_or_403(self, user):\n if user.id != self.game_master.id:\n raise PermissionDenied\n return True",
"def user_requested_access(user):\r\n user = CourseCreator.objects.get(user=user)\r\n if user.state != CourseCreator.GRANTED:\r\n user.state = CourseCreator.PENDING\r\n user.save()",
"def userreject_admin(user_id):\n\n # User objects list which includes list of all users which can be broken down into editors and sponsors\n # get individual user\n user = db.session.query(User).filter(User.id==user_id).first()\n # update status to approved\n user.user_status = 'rejected'\n # commit to database\n db.session.commit()\n\n return redirect(url_for('admin_bp.usersview_admin'))",
"def write_authorize_examinercommon(cls, user, obj):\n if obj.delivered_by != None:\n raise PermissionDenied()",
"def _check_cost(self, cr, uid, ids, context=None):\n for enrich in self.browse(cr, uid, ids, context=context):\n if enrich.amount <= 0:\n raise osv.except_osv(_('ValidateError'), _('The Cost Must Be Greater Than Zero!'))\n return True",
"def test_renew_user_pending_cancel(self):\n self.braintree_customer.active = True\n self.braintree_customer.pending_cancel = True\n self.braintree_customer.subscription_id = \"ValidSubscriptionID\"\n\n result = SubscriptionManager.renew(self.braintree_customer)\n self.assertEqual(\"ValidSubscriptionID\",result)\n self.assertFalse(self.braintree_customer.pending_cancel)",
"def make_eligible(self):\n pass",
"def test_user_cannot_unlock_hint():\n app = create_ctfd()\n with app.app_context():\n with app.test_client():\n register_user(app, name=\"user1\", email=\"[email protected]\")\n\n chal = gen_challenge(app.db, value=100)\n chal_id = chal.id\n\n gen_flag(app.db, challenge_id=chal.id, content=\"flag\")\n\n hint = gen_hint(db, chal_id, cost=10)\n hint_id = hint.id\n\n client = login_as_user(app, name=\"user1\", password=\"password\")\n\n with client.session_transaction():\n r = client.get(\"/api/v1/hints/{}\".format(hint_id))\n resp = r.get_json()\n assert resp[\"data\"].get(\"content\") is None\n assert resp[\"data\"].get(\"cost\") == 10\n destroy_ctfd(app)",
"def test_not_owner(self):\n creating_user = create_user()\n creating_user.save()\n festival = create_festival('test', creating_user)\n festival.save()\n\n concert = create_concert(festival, 'test')\n concert.save()\n\n login(self.client)\n\n client = create_client('test')\n client.delete_access = True\n client.save()\n\n response = self.client.post('/backend/u/conc/', {'client': 'test', 'id': concert.pk})\n self.assertEqual(response.status_code, 200)\n self.assertEqual('Permission not granted', response.content.decode('utf-8'))",
"def restricted(func):\n @wraps(func)\n def wrapped(bot, update, *args, **kwargs):\n user_id = update.effective_user.id\n if user_id not in LIST_OF_ADMINS:\n print(\"Unauthorized access denied for {}.\".format(user_id))\n # tell the unauthorized user to go away\n update.message.reply_text('Go away.')\n return\n return func(bot, update, *args, **kwargs)\n return wrapped",
"def admin_bankruptcy(self, with_report=False):\n name = self.client.srandmember(self._NAMES)\n self.admin_bankruptcy_forced_cancellation(name=name, with_report=with_report)\n self.admin_bankruptcy_notification(name=name)",
"def testPostAccessDenied(self):\n self.runPost(None, data=self.post_data)\n self.response_401()\n for user in (self.guest, self.norole, self.unrelated_owner):\n self.runPost(user, data=self.post_data)\n self.response_403()",
"async def admin_credit(self, ctx, target: discord.Member, sum: int = 100):\n if is_registered(target.id):\n \n inventories = get_file(\"inventories\")\n inventories[str(target.id)][\"balance\"] += sum\n update_file(\"inventories\", inventories)\n\n embed = discord.Embed(color=admin_color)\n embed.set_author(name=\"🛠️ Admin\")\n embed.add_field(name=\"💰 Credit\",\n value=f\"{ctx.author.mention}, {target.mention} a été crédité de `{sum}` PO (pièces d'or)\")\n embed = set_footer(embed, ctx)\n await ctx.send(embed=embed)",
"def test_modify_access_revoke_not_allowed(self):\r\n url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n response = self.client.get(url, {\r\n 'unique_student_identifier': self.other_staff.email,\r\n 'rolename': 'instructor',\r\n 'action': 'revoke',\r\n })\r\n self.assertEqual(response.status_code, 200)",
"def test_validate_ticket_invalid_ticket(self):\n with self.assertRaises(InvalidTicket):\n ProxyGrantingTicket.objects.validate_ticket('12345', 'https://www.example.com')",
"def testUpdateAccessDenied(self):\n self.runPut(None, sequencer=self.hiseq2000.sodar_uuid, data=self.post_data)\n self.response_401()\n for user in (self.guest, self.norole, self.unrelated_owner):\n self.runPut(user, sequencer=self.hiseq2000.sodar_uuid, data=self.post_data)\n self.response_403()",
"def test_no_credit_change_for_subscription_downgrade(self):\n credits = 20\n\n current_plan = Subscription.get_plan_by_id('pro')\n new_plan = Subscription.get_plan_by_id('standard')\n\n credits = add_subscription_credits(credits, current_plan, new_plan, None)\n\n assert credits == 20",
"def validate_ownership(item, user_id):\n if item.user_id != user_id:\n raise Forbidden('You are not allowed to modify this item.')",
"def test_access_negative(self, api):\n self.builder.add_user(api.get_user())\n r1 = api.access_user(api.get_user(), False)\n access_false = self.builder.get_access(api.get_user())\n self.builder.del_user(api.get_user())\n assert access_false == 0\n assert r1.status_code == 200",
"def test_dont_cancel_for_events_with_no_cost(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 11, 10, tzinfo=dt_timezone.utc\n )\n self.ticketed_event.ticket_cost = 0\n self.ticketed_event.save()\n self.assertFalse(self.unpaid.cancelled)\n self.assertFalse(self.paid.cancelled)\n\n management.call_command('cancel_unpaid_ticket_bookings')\n # emails are sent to user per cancelled booking and studio once for all\n # cancelled bookings\n self.unpaid.refresh_from_db()\n self.paid.refresh_from_db()\n self.assertEqual(len(mail.outbox), 0)\n self.assertFalse(self.unpaid.cancelled)\n self.assertFalse(self.paid.cancelled)",
"def test_admin_approval_not_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n\n user = self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n self.assertIs(user, False)\n self.assertIs(profile.user.is_active, False)",
"def can_approve(self, user, **data):\n raise Return(False)",
"def test_get_non_owner(self):\n another_user = CustomUser(id=101, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n self.client.login(email='[email protected]', password='testpassword')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 403)",
"def test_user_cannot_unlock_hint():\n app = create_ctfd()\n with app.app_context():\n with app.test_client() as client:\n register_user(app, name=\"user1\", email=\"[email protected]\")\n\n chal = gen_challenge(app.db, value=100)\n chal_id = chal.id\n\n flag = gen_flag(app.db, chal=chal.id, flag='flag')\n\n hint = gen_hint(db, chal_id, cost=10)\n hint_id = hint.id\n\n client = login_as_user(app, name=\"user1\", password=\"password\")\n\n with client.session_transaction() as sess:\n data = {\n \"nonce\": sess.get('nonce')\n }\n r = client.post('/hints/{}'.format(hint_id), data=data)\n resp = json.loads(r.data.decode('utf8'))\n assert resp.get('errors') == 'Not enough points'\n destroy_ctfd(app)",
"def test_get_non_owner(self):\n another_user = CustomUser(id=101, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n self.client.login(email='[email protected]', password='testpassword')\n\n url = reverse('route', kwargs={'way_id': self.route.way_id, 'route_id': self.route.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 403)",
"def test_not_permitted(self, default_store):\n course = self.create_course_with_orphans(default_store)\n orphan_url = reverse_course_url('orphan_handler', course.id)\n\n test_user_client, test_user = self.create_non_staff_authed_user_client()\n CourseEnrollment.enroll(test_user, course.id)\n response = test_user_client.get(orphan_url)\n self.assertEqual(response.status_code, 403)\n response = test_user_client.delete(orphan_url)\n self.assertEqual(response.status_code, 403)",
"def test_redemption_denied_unpaid(\n self, voucher: Voucher, counter: int, extra_tokens: int\n ) -> None:\n num_tokens = counter + extra_tokens\n issuer = unpaid_redemption()\n treq = treq_for_loopback_ristretto(issuer)\n redeemer = RistrettoRedeemer(treq, NOWHERE)\n random_tokens = redeemer.random_tokens_for_voucher(voucher, counter, num_tokens)\n d = redeemer.redeemWithCounter(\n voucher,\n counter,\n random_tokens,\n )\n self.assertThat(\n Deferred.fromCoroutine(d),\n failed(\n AfterPreprocessing(\n lambda f: f.value,\n IsInstance(Unpaid),\n ),\n ),\n )",
"def test_non_contractor_acks_receipt(self):\n res = self.client.post(self.url)\n self.assertEqual(res.status_code, 403)",
"def test_reject_agreement(self):\n pass",
"def test_withdraw_amount_view_with_negative_amount(self):\n self.account.current_balance = 100000\n self.account.save()\n\n client.force_authenticate(user=self.account.user, token=self.token)\n url = reverse('customer_withdraw')\n request = client.post(url, {'amount': -100}, format='json')\n self.assertEqual(400, request.status_code)",
"def test_can_not_book_past_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0) - timedelta(hours=1)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'book': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def test_no_enable_paid_course_registration(self):\r\n self.add_to_cart()\r\n self.request.user = self.user\r\n context = user_has_cart_context_processor(self.request)\r\n self.assertFalse(context['display_shopping_cart'])",
"def testGetAccessDenied(self):\n self.runGet(None, sequencer=self.hiseq2000.vendor_id)\n self.response_401()\n for user in (self.norole, self.unrelated_owner):\n self.runGet(user)\n self.response_403()",
"def no_reason(message, db):\n message.reply(Strings['GRANT_EXAMPLE'].format(db))",
"def test_ui_check_answer_negative(capsys, test):\n assert 'Invalid input' in hl.test_help_ui_check_answer_negative(capsys,\n test)",
"def test_delete_assigned_resource_by_non_admin(self):\n CommonTestCases.user_token_assert_in(\n self,\n delete_assigned_resource_mutation,\n \"You are not authorized to perform this action\"\n )",
"def validate(self,admin):\n\n rv=admin.helper.setAmount(admin.userName,\n 'ARS',self.actual+self.cnt)\n if rv is None:\n return False\n else:\n return True",
"def raise_not_editable(self, viewer):\n if viewer.has_perm(\"bookwyrm.edit_instance_settings\"):\n return\n raise PermissionDenied()",
"def test_no_admins_registered(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n\n with self.assertRaises(ImproperlyConfigured):\n self.registration_profile.objects.send_admin_approve_email(\n new_user, Site.objects.get_current())",
"def test_create_ticket_no_expires(self):\n st = ServiceTicket.objects.create_ticket(user=self.user)\n self.assertTrue(st.expires > now())",
"def add_user_with_status_unrequested(user):\r\n _add_user(user, CourseCreator.UNREQUESTED)",
"def test_auth_private_unowned(self):\n self.do_visible(False, 'pattieblack', False, tenant='froggy')",
"def test_switch_to_free_no_expiry(self):\n u = User.objects.get(username=\"test1\")\n u.userplan.expire = date.today() + timedelta(days=14)\n self.assertIsNotNone(u.userplan.expire)\n\n plan = Plan.objects.get(name=\"Free\")\n self.assertTrue(plan.is_free())\n self.assertNotEqual(u.userplan.plan, plan)\n\n # Switch to Free Plan\n u.userplan.extend_account(plan, None)\n self.assertEqual(u.userplan.plan, plan)\n self.assertIsNone(u.userplan.expire)\n self.assertEqual(u.userplan.active, True)",
"def add_ticket(self, user):\n profile = user.get_profile()\n if profile.available_tickets() <= 0:\n raise Exception(\"This user does not have any tickets to allocate.\")\n \n ticket = RaffleTicket(raffle_prize=self, user=user)\n ticket.save()",
"def test_can_not_reserve_booked_block(self):\n booking_other = create_test_booking(self.someone, self.first_day, 11)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'book': str(booking_other.date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)\n\n self.assertEqual(type(context[\"info\"]), NotAllowedAlert)",
"def test_buyTicket_AlreadySold():\n assert not testUser2.buyTicket(testTicket1)\n assert testTicket1 in testUser1.inventory\n assert testTicket1 not in testUser2.inventory\n assert not testTicket1.for_sale\n assert testUser2.wallet == 500",
"def can_accept_credit(self, value):\n return value >= 0",
"def test_revoke_inactive(self):\n self.invite.active = False\n self.invite.save()\n url = reverse(\n 'projectroles:api_invite_revoke',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(url, method='POST')\n self.assertEqual(response.status_code, 400, msg=response.content)",
"def cancel_dummy(self):\n if self.state != 'authorized':\n self.raise_user_error('cancel_only_authorized')\n else:\n self.state = 'cancel'\n self.save()",
"async def plaguebearer(self, ctx):\n currency = await bank.get_currency_name(ctx.guild)\n await self.config.user(ctx.author).gameRole.set(\"Plaguebearer\")\n await self.notify_user(ctx=ctx, user=ctx.author, notificationType=\"plaguebearer\")\n await ctx.send(f\"{ctx.author} has spent 10,000 {currency} and become a Plaguebearer.\")",
"def test_validate_ticket_consumed_ticket(self):\n pgt = ProxyGrantingTicketFactory(consume=True)\n with self.assertRaises(InvalidTicket):\n ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'https://www.example.com')",
"async def _bailout_heist(self, ctx, user: discord.Member=None):\r\n author = ctx.message.author\r\n theme = await self.thief.get_guild_theme(ctx.guild)\r\n\r\n t_bail = theme[\"Bail\"]\r\n t_sentence = theme[\"Sentence\"]\r\n\r\n if user is None:\r\n player = author\r\n else:\r\n player = user\r\n\r\n if await self.thief.get_member_status(player) != \"Apprehended\":\r\n return await ctx.send(\"{} is not in jail.\".format(player.display_name))\r\n\r\n cost = await self.thief.get_member_bailcost(player)\r\n if not await bank.get_balance(player) >= cost:\r\n await ctx.send(\"You do not have enough to afford the {} amount.\".format(t_bail))\r\n return\r\n\r\n if player.id == author.id:\r\n msg = (\"Do you want to make a {0} amount? It will cost {1} credits. If you are \"\r\n \"caught again, your next {2} and {0} amount will triple. \"\r\n \"Do you still wish to pay the {0} amount?\".format(t_bail, cost, t_sentence))\r\n else:\r\n msg = (\"You are about pay a {2} amount for {0} and it will cost you {1} credits. \"\r\n \"Are you sure you wish to pay {1} for {0}?\".format(player.name, cost, t_bail))\r\n\r\n await ctx.send(msg)\r\n response = await self.bot.wait_for('MESSAGE', timeout=15, check=lambda x: x.author == author)\r\n\r\n if response is None:\r\n await ctx.send(\"You took too long. canceling transaction.\")\r\n return\r\n\r\n if \"yes\" in response.content.lower():\r\n msg = (\"Congratulations {}, you are free! Enjoy your freedom while it \"\r\n \"lasts...\".format(player.display_name))\r\n await bank.withdraw_credits(author, cost)\r\n await self.thief.set_member_free(author)\r\n await self.thief.set_member_oob(author, False)\r\n elif \"no\" in response.content.lower():\r\n msg = \"Canceling transaction.\"\r\n else:\r\n msg = \"Incorrect response, canceling transaction.\"\r\n\r\n await ctx.send(msg)",
"def test_owner_edit_assessment_invalid(self):\n req, resp = data.get_assessment(self.contract['id'])\n response = self.user_01.put(self.assessment_report_url, req)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_buyTicket_FreeTicket():\n old_venue_balance = testVenue.wallet\n assert testUser4.buyTicket(testTicket4)\n assert testUser4.inventory[-1] == testTicket4\n assert not testTicket4.for_sale\n assert testUser4.wallet == 0\n assert testVenue.wallet == old_venue_balance",
"def test_admin_cannot_add_item(self):\n response = self.client.get(\n '/self.base_url/sales/3/2',\n headers=dict(Authorization=\"Bearer \" + self.owner_token),\n content_type = 'application/json'\n )\n\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"You cannot make a sale from an Admin account, Consider having an attendant account\")\n self.assertEqual(response.status_code,401)",
"def disability_specify(self, instance):\r\n return instance.user.profile.disability_specify",
"def test_case_user_not_yet_customer(self):\n pass",
"async def _pay_money(ctx, user : discord.Member, amount : int):\n if amount<0:\n await bot.reply(\"You can't pay someone a negative amount!\")\n elif user==ctx.message.author:\n await bot.reply(\"You can't pay yourself!\")\n else:\n await transfer(bot, ctx.message.author, user, amount)",
"def test_course_does_not_expire_for_verified_user(self):\n course = CourseFactory.create(start=THREE_YEARS_AGO)\n url = course_home_url(course)\n\n user = UserFactory.create(password=self.TEST_PASSWORD)\n CourseEnrollment.enroll(user, self.course.id, mode=CourseMode.VERIFIED)\n Schedule.objects.update(start_date=THREE_YEARS_AGO)\n\n # ensure that the user who has indefinite access\n self.client.login(username=user.username, password=self.TEST_PASSWORD)\n response = self.client.get(url)\n assert response.status_code == 200, 'Should not expire access for user'",
"def cancel_loan(request):\n if request.user.is_superuser:\n for user in User.objects.all():\n user.cancel_loan()\n return HttpResponse('Loan Deducted', status=200)\n return redirect('home')",
"def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)",
"def test_admin_approval_nonexistent_id(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n\n user = self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n self.assertIs(user, False)",
"def test_handle_lead_not_admin(self):\n test_user = User(\"userid\")\n team = Team(\"BRS\", \"brs\", \"web\")\n self.db.retrieve.return_value = test_user\n self.db.query.return_value = [team]\n with self.app.app_context():\n self.assertTupleEqual(self.testcommand.handle(\"team \"\n \"lead brs ID\", user),\n (self.testcommand.permission_error, 200))\n self.db.store.assert_not_called()",
"def test_audit_only_not_expired(self):\n CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))\n audit_only_course = CourseFactory.create()\n self.create_user_for_course(audit_only_course, CourseUserType.ENROLLED)\n response = self.client.get(course_home_url(audit_only_course))\n assert response.status_code == 200\n self.assertContains(response, TEST_COURSE_TOOLS)\n self.assertNotContains(response, TEST_BANNER_CLASS)",
"def test_user_can_change_admin(self):\n self.assertTrue(self.story.user_can_change(self.admin_user))",
"def write_authorize(cls, user, obj):\n if not cls._meta.model.published_where_is_examiner(user).filter(id=obj.id):\n raise PermissionDenied()\n if obj.id == None:\n raise PermissionDenied() # We only allow update"
] | [
"0.69149536",
"0.65459514",
"0.63113207",
"0.63042814",
"0.617169",
"0.6162346",
"0.60777926",
"0.60088956",
"0.5990186",
"0.5983708",
"0.59674823",
"0.59468544",
"0.59359336",
"0.59180915",
"0.5916562",
"0.58903867",
"0.58831507",
"0.58831507",
"0.58763945",
"0.58660877",
"0.5841624",
"0.5831001",
"0.5813524",
"0.5813499",
"0.58131415",
"0.5809147",
"0.57989734",
"0.57705516",
"0.57705516",
"0.57644874",
"0.5757944",
"0.5747579",
"0.573642",
"0.57276446",
"0.5721313",
"0.5717943",
"0.5706317",
"0.57030743",
"0.5684528",
"0.5664207",
"0.56488585",
"0.5632984",
"0.56285924",
"0.5601063",
"0.5600252",
"0.5591465",
"0.55857766",
"0.55843925",
"0.5583866",
"0.55822873",
"0.55814856",
"0.5573221",
"0.55643827",
"0.55485666",
"0.5547683",
"0.55452853",
"0.5544648",
"0.5538844",
"0.5535413",
"0.5530227",
"0.5529178",
"0.5528675",
"0.55170935",
"0.55037135",
"0.54974115",
"0.5495423",
"0.549079",
"0.54851675",
"0.5483512",
"0.5481943",
"0.5473158",
"0.54700243",
"0.546596",
"0.5453061",
"0.54389226",
"0.5438368",
"0.5437107",
"0.54361033",
"0.54346716",
"0.54270744",
"0.54253095",
"0.54208535",
"0.54207504",
"0.54197717",
"0.5415386",
"0.54127717",
"0.5407453",
"0.540316",
"0.54016095",
"0.5389299",
"0.5388659",
"0.53845614",
"0.53827006",
"0.5377585",
"0.5374283",
"0.5367238",
"0.53602254",
"0.53598595",
"0.53596675",
"0.53588665"
] | 0.73568875 | 0 |
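
Each record in this dump pairs one `query` with one positive `document`, a list of mined `negatives` with parallel `negative_scores`, plus the positive's own `document_score` and `document_rank`. Below is a minimal sketch of flattening one record into training triplets; the field names follow the schema header above, but the helper itself is hypothetical and not part of the dataset:

```python
def record_to_triplets(record):
    """Yield (query, positive, negative, negative_score) tuples for one record."""
    query = record["query"]
    positive = record["document"]
    # negatives and negative_scores are parallel sequences (4-101 items each,
    # per the schema header), so zip keeps each negative with its own score.
    for negative, score in zip(record["negatives"], record["negative_scores"]):
        yield query, positive, negative, float(score)
```
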
Convert a django model to a Python dictionary. | def model_to_dict(instance):
opts = instance._meta
data = {}
for f in opts.concrete_fields + opts.many_to_many:
if isinstance(f, ManyToManyField):
if instance.pk is None:
data[f.name] = []
else:
data[f.name] = list(f.value_from_object(instance).values_list('pk', flat=True))
else:
data[f.name] = f.value_from_object(instance)
return data | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def serialize(model):\n # first we get the names of all the columns on your model\n columns = [c.key for c in class_mapper(model.__class__).columns]\n # then we return their values in a dict\n return dict((c, getattr(model, c)) for c in columns)",
"def serialize(model):\n # first we get the names of all the columns on your model\n columns = [c.key for c in class_mapper(model.__class__).columns]\n # then we return their values in a dict\n return dict((c, getattr(model, c)) for c in columns)",
"def serialize(model):\n # first we get the names of all the columns on your model\n columns = [c.key for c in class_mapper(model.__class__).columns]\n # then we return their values in a dict\n return dict((c, getattr(model, c)) for c in columns)",
"def as_dict(self):\n data = dict()\n for name in self.fields:\n val = getattr(self, name)\n if isinstance(val, Model):\n val = val.as_dict()\n elif isinstance(val, list) and val and isinstance(val[0], Model):\n val = [sub.as_dict() for sub in val]\n data[name] = val\n return data",
"def get_modelDict(self):\n return self.__modelDict",
"def to_dict(model):\n result = {}\n for attr, _ in six.iteritems(model.swagger_types):\n value = getattr(model, attr)\n result[attr] = to_dict_value(value)\n return result",
"def to_dict_model(self) -> dict:\n return dict((key, getattr(self, key)) for key in self.__mapper__.c.keys())",
"def model_to_json(model: Base) -> Dict[str, Any]:\n json = {}\n for col in model.__mapper__.attrs.keys(): # type: ignore\n if col != \"hashed_password\" and col != \"salt\":\n if col in datetime_cols:\n # Cast datetime object to string\n json[col] = str(getattr(model, col))\n else:\n json[col] = getattr(model, col)\n return json",
"def model_to_dict(instance):\n opts = instance._meta\n data = {}\n for f in opts.concrete_fields + opts.many_to_many:\n if isinstance(f, ManyToManyField):\n if instance.pk is None:\n data[f.name] = []\n else:\n try:\n data[f.name] = list(f.value_from_object(instance).values_list('pk', flat=True))\n except Exception as e:\n print(e)\n data[f.name] = []\n else:\n data[f.name] = f.value_from_object(instance)\n return data",
"def my_model_to_dict(instance, fields=None, exclude=None):\n # avoid a circular import\n from django.db.models.fields.related import ManyToManyField\n opts = instance._meta\n data = {}\n for f in opts.concrete_fields + opts.many_to_many:\n # if not f.editable:\n # continue\n if fields and not f.name in fields:\n continue\n if exclude and f.name in exclude:\n continue\n if isinstance(f, ManyToManyField):\n # If the object doesn't have a primary key yet, just use an empty\n # list for its m2m fields. Calling f.value_from_object will raise\n # an exception.\n if instance.pk is None:\n data[f.name] = []\n else:\n # MultipleChoiceWidget needs a list of pks, not object instances.\n data[f.name] = list(f.value_from_object(instance).values_list('pk', flat=True))\n else:\n value = f.value_from_object(instance)\n if isinstance(value, decimal.Decimal) or isinstance(value, datetime) or isinstance(value, date):\n value_str = str(value)\n data[f.name] = value_str\n else:\n data[f.name] = f.value_from_object(instance)\n return data",
"def test_todictreturntype(self):\n b1 = BaseModel()\n self.assertEqual(type(b1.to_dict()), dict)",
"def model_to_dict(instance, fields=None, exclude=None):\n opts = instance._meta\n data = {}\n omit = getattr(SETTINGS, 'OMIT_UN_EDITABLE_FIELDS', False)\n for f in chain(opts.concrete_fields, opts.private_fields, opts.many_to_many):\n if omit and not getattr(f, 'editable', False):\n continue\n if fields and f.name not in fields:\n continue\n if exclude and f.name in exclude:\n continue\n data[f.name] = f.value_from_object(instance)\n return data",
"def to_dict(self):\n\n # Check if is the right instance.\n if isinstance(self, db.Model):\n # construct a dictionary from column names and values.\n dict_representation = {c.name: getattr(self, c.name) for c in self.__table__.columns}\n return dict_representation\n else:\n raise AttributeError(type(self).__name__ + \" is not instance of \" + db.Model.__name__)",
"def model_info(self):\n if not self._model_info:\n self._load_model_info()\n try:\n data = json.loads(self._model_info)\n except (TypeError, ValueError):\n data = {}\n return data",
"def model_to_instance(model):\n pass",
"def model_attributes(self, app_label, model):\n model_name = model.__name__\n model_name_plural = self.model_name_plural(model)\n slug_field = self.get_unique_slug_field_name(model)\n slug_field_name = slug_field.name if slug_field else \"slug\"\n lookup_field = slug_field_name if slug_field else \"pk\"\n return {\n 'app_label': app_label,\n 'model': model,\n 'model_name': model_name,\n 'model_name_slug': self.camel_to_slug(model_name),\n 'model_name_plural': model_name_plural,\n 'model_name_plural_slug': self.camel_to_slug(model_name_plural),\n 'model_fields': self.get_field_names_for_model(model),\n 'slug_field': slug_field,\n 'slug_field_name': slug_field_name,\n 'lookup_field': lookup_field\n }",
"def botorch_model_to_dict(model: BoTorchModel) -> Dict[str, Any]:\n return {\n \"__type\": model.__class__.__name__,\n \"acquisition_class\": model.acquisition_class,\n \"acquisition_options\": model.acquisition_options or {},\n \"surrogate\": model._surrogates[Keys.ONLY_SURROGATE]\n if Keys.ONLY_SURROGATE in model._surrogates\n else None,\n \"surrogate_specs\": model.surrogate_specs\n if len(model.surrogate_specs) > 0\n else None,\n \"botorch_acqf_class\": model._botorch_acqf_class,\n \"refit_on_update\": model.refit_on_update,\n \"refit_on_cv\": model.refit_on_cv,\n \"warm_start_refit\": model.warm_start_refit,\n }",
"def to_dict(self):\n _dict = {}\n for f in self._meta.fields:\n if f.name == 'created':\n _dict[f.name] = str(f.value_from_object(self))\n else:\n _dict[f.name] = f.value_from_object(self)\n\n return _dict",
"def get_model_meta_info(model_name):\n return dict(dict(model_meta_info)[model_name])",
"def dict(self):\n return objToDict(self)",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def _to_dict(self):\n return self.to_dict()",
"def serialize(cls, model, *fields):\n\n if fields:\n raw_model = {field: model.get(field) for field in fields}\n\n else:\n raw_model = {field: model.get(field) for field in cls.fields}\n\n if 'posted_at' in raw_model:\n raw_model['posted_at'] = str(raw_model['posted_at']).split('.')[0]\n\n return raw_model",
"def to_obj(self):\n return dict()",
"def model_to_dict(model, mode=\"\", show_defaults=False):\n\n def is_mode(obj, mode):\n if mode == \"\":\n return True\n elif mode == \"config\":\n return obj._yang_name == \"config\" or obj._is_config\n elif mode == \"state\":\n return obj._yang_name == \"state\" or not obj._is_config\n else:\n raise ValueError(\n \"mode can only be config, state or ''. Passed: {}\".format(mode)\n )\n\n def get_key(key, model, parent_defining_module, show_defaults):\n if not show_defaults:\n # No need to display rw/ro when showing the defaults.\n key = \"{} {}\".format(key, \"[rw]\" if model._is_config else \"[ro]\")\n if parent_defining_module != model._defining_module:\n key = \"{}:{}\".format(model._defining_module, key)\n return key\n\n if model._yang_type in (\"container\", \"list\"):\n cls = model if model._yang_type in (\"container\",) else model._contained_class()\n result = {}\n for k, v in cls:\n r = model_to_dict(v, mode=mode, show_defaults=show_defaults)\n if r:\n result[get_key(k, v, model._defining_module, show_defaults)] = r\n return result\n else:\n if show_defaults:\n if model._default is False:\n if model._yang_type != \"boolean\":\n # Unless the datatype is bool, when the _default attribute\n # is False, it means there is not default value defined in\n # the YANG model.\n return None\n return model._default\n return model._yang_type if is_mode(model, mode) else None",
"def _to_dict(self):\n\t\treturn {'id': self.id,\n\t\t\t\t'name': self.name,\n\t\t\t\t'surname': self.surname}",
"def to_dict(self):\n return to_dict(self.__dict__)",
"def test_after_todict(self):\n my_model = BaseModel()\n new_model = BaseModel()\n test_dict = my_model.to_dict()\n self.assertIsInstance(my_model, BaseModel)\n self.assertEqual(type(my_model).__name__, \"BaseModel\")\n self.assertEqual(test_dict['__class__'], \"BaseModel\")\n self.assertTrue(type(test_dict['__class__']), 'str')\n self.assertTrue(type(test_dict['created_at']), 'str')\n self.assertTrue(type(test_dict['updated_at']), 'str')\n self.assertTrue(type(test_dict['id']), 'str')\n self.assertNotEqual(my_model.id, new_model.id)",
"def to_dict(self):\n ret = {}\n for key in dir(self):\n if key.startswith(\"_\"):\n continue\n\n if key in ['id', 'objects', 'pk', 'STRICT']:\n continue\n\n obj = getattr(self, key)\n if callable(obj):\n continue\n ret[key] = obj\n return ret",
"def instance_to_model(self):\n pass",
"def _get_model_state(self) -> dict:\n return dict(model=self.model, kwargs=self._model_kwargs)",
"def to_dict(self) -> Dict[str, Union[str, Number, dict, list]]:\n model = dict()\n model[\"name\"] = self.get_model_name()\n model[\"allocation_paradigm\"] = self.allocation_paradigm.name\n model[\"cpu_count\"] = self.cpu_count\n model[\"time_range\"] = self.time_range.to_dict()\n model[\"hydrofabric_data_id\"] = self.hydrofabric_data_id\n model[\"hydrofabric_uid\"] = self.hydrofabric_uid\n model[\"config_data_id\"] = self.config_data_id\n model[\"bmi_config_data_id\"] = self._bmi_config_data_id\n if self.catchments is not None:\n model[\"catchments\"] = self.catchments\n if self.partition_cfg_data_id is not None:\n model[\"partition_config_data_id\"] = self.partition_cfg_data_id\n\n return {\"model\": model, \"session-secret\": self.session_secret}",
"def to_dict(self) -> Dict:\n return {'object_id': self.object_id, 'data_id': self.data_id}",
"def getDict(self):\n res = {}\n for attr, value in self.__dict__.iteritems():\n if type(attr) is IntType or type(attr) is StringType or type(attr) is LongType or type(attr) is UnicodeType:\n res[attr] = value\n elif isinstance(attr, datetime.datetime):\n res[attr] = value.isoformat('-')\n \n return res",
"def _model_to_dict(models, *fields, **props):\n result = []\n fields = list(fields)\n\n is_many = isinstance(models, list)\n\n # terminate early if there is nothing to work on\n if not models:\n return [] if is_many else {}\n\n if not is_many:\n models = [models]\n\n assert isinstance(models[0], ActiveRecordMixin), \"Invalid ActiveRecord object\"\n\n if fields and len(fields) == 1:\n fields = [s.strip() for s in fields[0].split(',')]\n\n # pop of meta information\n # _overwrite = props.pop('_overwrite', None)\n _exclude = props.pop('_exclude', [])\n if isinstance(_exclude, basestring):\n _exclude = [e.strip() for e in _exclude.split(',')]\n\n # select columns given or all if non was specified\n model_attr = set(_get_columns(models[0]))\n if not model_attr & set(fields):\n fields = model_attr | set(fields)\n\n # correctly filter relation attributes and column attributes\n related_attr = set(fields) - model_attr\n model_attr = set(fields) - (set(_exclude) | related_attr)\n\n # check if there are relationships\n related_fields = _get_relations(models[0]).keys()\n related_map = {}\n # check if remaining fields are valid related attributes\n for k in related_attr:\n if '.' in k:\n index = k.index(\".\")\n model, attr = k[:index], k[index + 1:]\n if model in related_fields:\n related_map[model] = related_map.get(model, [])\n related_map[model].append(attr)\n elif k in related_fields:\n related_map[k] = []\n\n # no fields to return\n if not model_attr and not related_map:\n return {}\n\n for key in _primary_key_names(models[0]):\n model_attr.add(key)\n\n for model in models:\n data = {}\n # handle column attributes\n for k in model_attr:\n if k in getattr(model, '_attr_hidden', []):\n continue\n v = getattr(model, k)\n # change dates to human readable format\n data[k] = json_serialize(v)\n\n # handle relationships\n for k in related_map:\n val = getattr(model, k)\n fields = related_map[k]\n data[k] = _model_to_dict(val, *fields)\n\n # add extra properties\n for k in props:\n data[k] = props[k]\n if callable(data[k]):\n data[k] = data[k](model)\n\n # add to results\n result.append(data)\n\n # get correct response\n result = result if is_many else result[0]\n return result",
"def to_dict(self) -> dict:",
"def to_dict(self):\n\n dic = dict(**self.__dict__)\n dic['__class__'] = str(type(self).__name__)\n dic['created_at'] = self.created_at.isoformat()\n dic['updated_at'] = self.updated_at.isoformat()\n return (dic)",
"def serialize_model(result):\n relation_prefix = '/' # this prefix is added to fields that are a relation\n\n # Note: unloaded property is used to discard fields that are not loaded, ie. lazily loaded,\n # such as relationships (by default), and fields not specified in query select clause.\n selected = (lambda field: not field in sqlalchemy.orm.attributes.instance_state(result).unloaded)\n fields = list(filter(selected, result._sa_instance_state.attrs.keys()))\n\n object = {}\n for field in fields:\n\n try:\n value = getattr(result, field)\n except AttributeError:\n continue # we are permissive\n\n if not is_relationship(getattr(result.__class__, field)):\n object[field] = value\n\n else:\n if isinstance(value, sqlalchemy.orm.collections.InstrumentedList):\n # ..n relationship: value is a list of sqla models\n object[relation_prefix + field] = list(map(lambda result: result.id, value))\n\n elif isinstance(type(value), sqlalchemy.ext.declarative.api.DeclarativeMeta):\n # ..1 relationship: value is a sqla model\n object[relation_prefix + field] = value.id\n\n else:\n # ..1 relationship: value shall be empty\n object[relation_prefix + field] = value\n\n return object",
"def test_after_to_dict(self):\n my_model = City()\n new_model = City()\n test_dict = my_model.to_dict()\n self.assertIsInstance(my_model, City)\n self.assertEqual(type(my_model).__name__, \"City\")\n self.assertEqual(test_dict['__class__'], \"City\")\n self.assertTrue(type(test_dict['__class__']), 'str')\n self.assertTrue(type(test_dict['created_at']), 'str')\n self.assertTrue(type(test_dict['updated_at']), 'str')\n self.assertTrue(type(test_dict['id']), 'str')\n self.assertNotEqual(my_model.id, new_model.id)",
"def to_dict(self) -> Dict[str, Any]:\n return self.__dict__.copy()",
"def getDict(cls, obj):\n\t\treturn obj.__dict__",
"def get_model_data(request):\n modelname = request.matchdict['modelname']\n # Check that model is defined\n exists = db_model_definition(request.db)[modelname]\n if not exists:\n raise NotFound(\"Unknown model %s\" % modelname)\n # Return array of records\n results = db_model_data(request.db)[modelname]\n # TODO: should we transmit uuids or keep them secret for editing\n data = [result.value for result in results]\n return {'data': data}",
"def to_model(self, obj):\n if obj is None:\n raise UnprocessableEntity(\"expected data in request, was empty\", what=BAD_VALUE)\n \n if not isinstance(obj, Mapping):\n raise UnprocessableEntity(\"expected data object in request\", what=BAD_VALUE)\n \n return {k: self.cols_to_model[k](v) for k, v in obj.items() if k in self.cols_to_model}",
"def to_dict(self):\n serialized = self._serializer().dump(self)\n return serialized.data",
"def as_dict(self):\n return asdict(self)",
"def to_dict(self):\r\n return self.__dict__",
"def to_dict(self):\n\n dic = self.__dict__.copy()\n dic['__class__'] = self.__class__.__name__\n dic['created_at'] = self.created_at.isoformat()\n dic['updated_at'] = self.updated_at.isoformat()\n\n return (dic)",
"def model_dict(self) -> dict:\n model_dict = dict()\n for key, value in self.kwargs.items():\n current = model_dict\n if not isinstance(key, tuple):\n key = (key,)\n for part in key[:-1]:\n if part not in current:\n current[part] = dict()\n current = current[part]\n current[key[-1]] = value\n return model_dict",
"def to_dict(self):",
"def get_storage_model(self) -> Dict[str, Any]:\n return self.data.dict()",
"def to_dict(self):\n dicto = self.__dict__.copy()\n dicto['created_at'] = self.created_at.strftime(\"%Y-%m-%dT%H:%M:%S.%f\")\n dicto['updated_at'] = self.updated_at.strftime(\"%Y-%m-%dT%H:%M:%S.%f\")\n dicto['__class__'] = self.__class__.__name__\n return dicto",
"def to_dict(self):\n return self.__dict__",
"def to_dict(self):\n return self.__dict__",
"def to_dict(self):\n return self.__dict__",
"def to_dict(self):\n return self.__dict__",
"def to_dict(self):\n return self.__dict__",
"def to_dict(self):\n return self.__dict__"
] | [
"0.74951124",
"0.74951124",
"0.74951124",
"0.7297441",
"0.71069056",
"0.70827776",
"0.701292",
"0.69411165",
"0.67609876",
"0.6588779",
"0.65625906",
"0.6549681",
"0.65171313",
"0.6515657",
"0.6490812",
"0.6461443",
"0.6450688",
"0.64377403",
"0.6435162",
"0.64019275",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.63385475",
"0.6250414",
"0.62413853",
"0.6237404",
"0.6236847",
"0.62313366",
"0.6228077",
"0.6223342",
"0.6219889",
"0.6208243",
"0.6184072",
"0.618078",
"0.61702543",
"0.6167393",
"0.61607885",
"0.61516917",
"0.6150789",
"0.61156017",
"0.61136144",
"0.60900927",
"0.60890216",
"0.6086104",
"0.608424",
"0.60823035",
"0.6077697",
"0.60682297",
"0.60619015",
"0.6060021",
"0.60413814",
"0.603324",
"0.6027578",
"0.6027578",
"0.6027578",
"0.6027578",
"0.6027578",
"0.6027578"
] | 0.6789229 | 8 |
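
In both complete records shown so far, `document_rank` equals the number of negatives whose score exceeds `document_score`: 0 for the first record, where the positive's 0.73568875 beats every negative, and 8 here, where exactly eight negatives score above 0.6789229. The sanity check below encodes that observation; it is an inference from these two records, not a documented guarantee of the dataset:

```python
def expected_rank(record):
    """Count negatives that outscore the positive document (observed rank rule)."""
    positive_score = float(record["document_score"])
    return sum(float(s) > positive_score for s in record["negative_scores"])
```
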
Returns a JSON response, transforming 'context' to make the payload. | def render_to_json_response(self, context, **response_kwargs):
response_kwargs.update(dict(json_dumps_params=dict(ensure_ascii=False)))
return JsonResponse(self.safe_json(context), **response_kwargs) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def render_to_response(self, context):\n return self.get_json_response(self.convert_context_to_json(context))",
"def render_to_response(self, context):\n\t\treturn self.get_json_response(self.convert_context_to_json(context))",
"def render_to_json_response(self, context, **response_kwargs):\n return HttpResponse(\n self.convert_context_to_json(context),\n content_type='application/json',\n **response_kwargs\n )",
"def render_to_json_response(self, context, **response_kwargs):\n return HttpResponse(\n self.convert_context_to_json(context),\n content_type='application/json',\n **response_kwargs\n )",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(self.get_data(context))",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(self.get_data(**context), **response_kwargs)",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )",
"def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )",
"def render_to_response(self, context, **response_kwargs):\n return JsonResponse(context)",
"def convert_context_to_json(self, context):\n\t\t# Note: This is *EXTREMELY* naive; in reality, you'll need\n\t\t# to do much more complex handling to ensure that arbitrary\n\t\t# objects -- such as Django model instances or querysets\n\t\t# -- can be serialized as JSON.\n\t\treturn json.dumps(context)",
"def convert_context_to_json(self, context):\n # Note: This is *EXTREMELY* naive; in reality, you'll need\n # to do much more complex handling to ensure that arbitrary\n # objects -- such as Django model instances or querysets\n # -- can be serialized as JSON.\n return json.dumps(context)",
"def convert_context_to_json(self, context):\n return json.dumps(context, indent=4)",
"def render_response(self, context):\n\n # if object is a string just return as is\n if isinstance(context, basestring):\n self.response.write(context)\n # else attempt to serialise and return\n else:\n context = json.dumps(context)\n self.response.write(context)\n # set the right content-type header\n self.response.headers['Content-Type'] = 'application/json'",
"def convert_context_to_json(self, context):\n name = self.get_json_callback_parameter_name()\n context = self.inject_debug_info(context)\n obj = self.json_encoder.encode(context)\n if name:\n return \"%s(%s)\" % (name, obj)\n return obj",
"def render_body(self, context):\n if self.body_template:\n return render_jinja2(self.body_template, context)\n else:\n return json.dumps(context, cls=JSONEncoder)",
"def json_response(self, request, *args, **kwargs):\n\n return HttpResponse(self.construct_json(),\n content_type='application/json',\n mimetype='application/json', status=self.status)",
"def get_data(self, context):\n # Note: This is *EXTREMELY* naive; in reality, you'll need\n # to do much more complex handling to ensure that arbitrary\n # objects -- such as Django model instances or querysets\n # -- can be serialized as JSON.\n return context",
"def get_data(self, context):\n # Note: This is *EXTREMELY* naive; in reality, you'll need\n # to do much more complex handling to ensure that arbitrary\n # objects -- such as Django model instances or querysets\n # -- can be serialized as JSON.\n return context",
"def get_data(self, context):\n # Note: This is *EXTREMELY* naive; in reality, you'll need\n # to do much more complex handling to ensure that arbitrary\n # objects -- such as Django model instances or querysets\n # -- can be serialized as JSON.\n return context",
"def get_data(self, context):\n # Note: This is *EXTREMELY* naive; in reality, you'll need\n # to do much more complex handling to ensure that arbitrary\n # objects -- such as Django model instances or querysets\n # -- can be serialized as JSON.\n return context",
"def render_to_response(self, context, **response_kwargs):\n json = self.convert_context_to_json(context)\n # If callback is specified, serve as JSONP\n callback = self.request.GET.get('callback', None)\n if callback:\n response_kwargs['content_type'] = 'application/javascript'\n json = \"%s(%s);\" % (callback, json)\n return self.response_class(json, **response_kwargs)",
"def get_data(self, **context):\n # Note: This is *EXTREMELY* naive; in reality, you'll need\n # to do much more complex handling to ensure that arbitrary\n # objects -- such as Django model instances or querysets\n # -- can be serialized as JSON.\n return context",
"def render_to_response(self, context):\n if self.get_is_ajax():\n return self.get_json_response(self.get_json_content_prefix() + self.convert_context_to_json(context))\n return super(AjaxJsonResponseMixin, self).render_to_response(context)",
"def render_to_response(self, context, **kwargs):\n kwargs = {}\n additional_headers = {}\n #create response headers\n if 'header' in context:\n for key in context['header']:\n if key == 'Content-Type':\n kwargs['content_type'] = context['header'][key]\n elif key.lower() == 'status':\n kwargs['status'] = context['header'][key]\n else:\n additional_headers[key] = context['header'][key]\n del context['header']\n \n #return json if not header\n if not 'content_type' in kwargs:\n kwargs['content_type'] = 'application/json'\n \n if 'pointer' in context: #return file\n context['pointer'].seek(0)\n kwargs['content'] = context['pointer'].read()\n context['volume'].close(context['pointer'], context['info']['hash'])\n elif 'raw' in context and context['raw'] and 'error' in context and context['error']: #raw error, return only the error list\n kwargs['content'] = context['error']\n elif kwargs['content_type'] == 'application/json': #return json\n kwargs['content'] = json.dumps(context)\n else: #return context as is!\n kwargs['content'] = context\n \n response = HttpResponse(**kwargs)\n for key, value in additional_headers.items():\n response[key] = value\n\n return response",
"def safe_json(self, context):\n serialize_context = dict()\n for key, obj in context.items():\n if isinstance(obj.__class__, ModelBase):\n if hasattr(obj, 'serialize') and callable(getattr(obj, 'serialize')):\n serialize_context[key] = obj.serialize()\n else:\n serialize_context[key] = model_to_dict(obj)\n elif isinstance(obj, QuerySet):\n serialize_context[key] = [o.serialize() for o in obj if hasattr(o, 'serialize')]\n if len(serialize_context[key]) != len(obj):\n serialize_context[key] = [model_to_dict(o) for o in obj]\n elif key == 'extra':\n serialize_context[key] = obj\n # elif key == 'view':\n # continue\n # else:\n # serialize_context[key] = obj\n return dict(success=True, data=serialize_context)",
"def gen_response(data):\n return jsonify({\n \"response\": data\n })",
"def print_json(res, ctx):\n\n return json.dumps(res)",
"def get(self) -> Response:\n response = {\"@context\": get_doc().entrypoint.context.generate()}\n return set_response_headers(jsonify(response))",
"def JSONResponse(payload):\n return HttpResponse(json.dumps(payload), mimetype='application/json')",
"def render_to_json_response(self, data: Optional[Dict] = {}, meta: Optional[Dict] = {},\n error: Optional[str] = '', status=HTTPStatus.OK, **response_kwargs):\n response_data = {\"body\": data, \"meta\": meta, \"error\": error}\n return JsonResponse(response_data, status=status, **response_kwargs)",
"def json(self, **kwargs):\n\t\ttry:\n\t\t\treturn self.response.json(**kwargs)\n\t\texcept ValueError:\n\t\t\t# No valid JSON encoding\n\t\t\treturn None",
"def json_response(data):\n return current_app.response_class(\n json.dumps(data),\n mimetype=\"application/json\"\n )",
"def jsonify(self, obj, status_code=200):\n assert isinstance(obj, dict)\n\n response = jsonify(obj)\n response.status_code = status_code\n return response",
"def xjsonify(obj):\n\n return Response(json.dumps(obj), mimetype='application/json')",
"def json(self):\n if self.resource:\n return jsonify(self.resource)\n return jsonify({'id': self.id})",
"def json_response(*args, **kwargs):\n data = stringify(*args, **kwargs)\n return Response(data, mimetype='application/json')",
"def get_json_response(self, content, **httpresponse_kwargs):\n\t\treturn HttpResponse(content,\n\t\t\t\t\t\t\t\t content_type='application/json',\n\t\t\t\t\t\t\t\t **httpresponse_kwargs)",
"def to_http_response(self) -> HttpResponse:\n response = (\n JsonResponse(self.body)\n if (self.headers or {}).get(\"Content-Type\") == \"application/json\"\n else HttpResponse(self.body)\n )\n response.headers = self.headers\n return response",
"def format(self, *args):\n\t\tweb.header('Content-Type', 'application/json; charset=utf-8')\n\t\treturn json.dumps(self.content)",
"def json(self) -> Any:\n return self.body.json()",
"def get_json_response(obj):\n return HttpResponse(json.dumps(obj))",
"def get_json_response(obj):\n return HttpResponse(json.dumps(obj))",
"def json_response(obj):\n return HttpResponse(json.dumps(obj), content_type=\"application/json\")",
"def json_response(content=\"\", status=\"OK\", status_code=200, error=\"\"):\n wrapper = {\n 'status': status,\n 'status_code': status_code,\n 'output': content,\n 'error': error\n }\n response = json.dumps(wrapper, cls=DjangoJSONEncoder, indent=4)\n return HttpResponse(response, content_type='application/json', status=status_code)",
"def json_response(func):\n\t@wraps(func)\n\tdef decorated_view(*args, **kwargs):\n\t\tdata = func(*args, **kwargs)\n\t\tdata = json.dumps(data)\n\t\tresponse = make_response(data)\n\t\tresponse.headers['Content-Type'] = 'application/json'\n\t\treturn response\n\treturn decorated_view",
"def response_with(response, status=200):\n return make_response(jsonify(response), status)",
"def response(self):\n response = {}\n if self.stats is not None:\n response = self.stats\n\n return json.dumps(response)",
"def make_response(status=200, content=None):\n\n return current_app.response_class(json.dumps(content,\n indent=None if request.is_xhr else 2), mimetype='text/plain')",
"def jsonresp(value):\n body = (json.dumps(value),)\n cherrypy.response.headers['Content-Type'] = 'application/json'\n return body",
"def make_response(status=200, content=None):\n\n return current_app.response_class(json.dumps(content,\n indent=None if request.is_xhr else 2),\n mimetype='text/plain')",
"def jsonify(obj, status=200, headers=None):\n jsondata = json.dumps(obj, cls=JSONEncoder)\n\n if 'callback' in request.args:\n jsondata = '%s(%s)' % (request.args.get('callback'), jsondata)\n\n return Response(jsondata, headers=headers, status=status, mimetype='application/json')",
"def inner(*args, **kwargs):\n return Response(\n dumps(function(*args, **kwargs)),\n mimetype='application/json'\n )",
"def _openapi_json(self):\n # We don't use Flask.jsonify here as it would sort the keys\n # alphabetically while we want to preserve the order.\n from pprint import pprint\n pprint(self.to_dict())\n return current_app.response_class(json.dumps(self.to_dict(), indent=4),\n mimetype='application/json')",
"def _convert_to_JSON(result):\n response = make_response(json.dumps(result))\n response.headers['Access-Control-Allow-Origin'] = \"*\"\n response.mimetype = \"application/json\"\n return response",
"def reponse(self, data):\n response = self.response\n response.headers['Content-Type'] = 'application/json'\n json.dump(data, response.out)\n return response",
"def json_response( json_object ):\n return HttpResponse( json.dumps(json_object) )",
"def __call__(self, rv):\n if isinstance(rv, ResponseBase):\n return rv\n data, status, headers = unpack(rv)\n resp = flask.make_response(self._encoder(data, **self.json_settings),\n status, {'Content-Type': self.content_type})\n resp.headers.extend(headers)\n return resp",
"def response_json(data, status=200):\n\n response = jsonify(**data)\n response.status_code = status\n\n return response",
"def J(*args, **kwargs):\n response = jsonify(*args, **kwargs)\n response.mimetype = 'application/vnd.api+json'\n return response",
"def json(self, **kwargs):\n return json.loads(self.content, **kwargs)",
"def wrapper(*args, **kwargs):\n response = {\n \"meta\": {\n \"status\": kwargs.pop(\"status\", True),\n \"verbose\": kwargs.pop(\"verbose\", \"OK\")\n },\n \"content\": None\n }\n if not response[\"meta\"][\"status\"]:\n cherrypy.response.headers['Content-Type'] = 'application/json'\n cherrypy.response.status = 400\n return json.dumps(response)\n return method(*args, **kwargs)",
"def get_json_response(self, content, **httpresponse_kwargs):\n response = HttpResponse(content,\n content_type='application/json',\n **httpresponse_kwargs)\n return response",
"def render_to_response(self, context, **response_kwargs):\n return self.response_class(\n request = self.request,\n template = self.get_template_names(),\n context = context,\n **response_kwargs\n )",
"def to_response_data(self) -> typing.Any:\n return json.dumps(self.value, cls=ToJsonEncoder)",
"def render_json(object):\r\n return HttpResponse(jsonify(object), content_type='application/json')",
"def jsonify(f):\n @wraps(f)\n def decorated_function(*args, **kwargs):\n result = f(*args, **kwargs)\n data = json.dumps(result, indent=None if request.is_xhr else 2)\n return app.response_class(data, mimetype='application/json')\n return decorated_function",
"def body(self):\n return json.dumps(self.data, cls=ServerlessJsonEncoder)",
"def json_out(self, data):\n\t\treturn json.dumps(data)",
"def render_json(self, obj):\n self.response.content_type = \"application/json\"\n self.response.out.write(json.encode(obj))",
"def rest(method, path, context, body=None):\n\n headers = {\n \"Accept\": \"*/*\",\n \"Content-Type\": \"application/json; charset=utf-8\",\n }\n domain = context['headers']['domain']\n auth = get_auth(context)\n url = get_url(domain, path)\n\n response = requests.request(method, url, headers=headers, json=body, auth=auth)\n\n if response.status_code >= 400:\n raise Exception(\"Error: \", response.text)\n\n return json.loads(response.text)",
"def output_json(data, code, headers=None):\n #data[\"timestamp\"] = datetime.now()\n return jsonify(data)",
"def api_response():\n\n data = {\n 'hello': 'world',\n 'number': 12\n }\n\n js = json.dumps(data)\n # (@data, @status_code (200 by default), @data_type)\n resp = Response(response=js, status=200, mimetype='application/json')\n\n # Using jsonify to simplify syntax, returns exactly the same flask-Response object\n # from flask import jsonify\n # resp = jsonify(data)\n # resp.status_code = 200\n\n return resp",
"def adrest_jsonify(content, **options):\r\n from adrest.utils.serializer import JSONSerializer\r\n worker = JSONSerializer(**options)\r\n return worker.serialize(content)",
"def json(self) -> dict:\n return {\n 'id': self.id,\n 'requestType': self.request_type.name,\n 'isProcessed': self.is_processed,\n 'serviceName': self.service_name.name,\n 'isAdmin': self.is_admin,\n 'creationDate': LegislationDatetime.as_legislation_timezone(self.creation_date).isoformat()\n }",
"def dispatch(self, request, *args, **kwargs):\n # Wrap the dispatch method, so that we autoencode JSON\n response = super(JSONRestView, self).dispatch(request, *args, **kwargs)\n # If this is not an HTTPResponseBase object (Base class for responses) \n if not isinstance(response, HttpResponseBase):\n response = json_response(response)\n\n return response",
"def json_response(self, out, code=200):\n self.response.set_status(code)\n self.response.headers[CONTENT_TYPE] = CONTENT_TYPE_JSON\n self.response.out.write(json.dumps(out))",
"def create_response(result):\n return ControllerResponse(\n response=result,\n status=200,\n mime='application/json',\n jsonize=True,\n )",
"def to_response(self, data):\n return self.from_dict(data).to_dict()",
"def _to_json_dict(self, path_context, memo=None):\n return {'tag': self.tag(), 'data':\n {'meta': self._meta.save(path_context, memo=memo)}}",
"def json(data, indent=False):\n return Response(simplejson.dumps(data, indent=indent),\n content_type='application/json')",
"def jsonify(function):\n @wraps(function)\n def inner(*args, **kwargs):\n \"\"\"\n This docstring will be overridden by @wraps decorator.\n \"\"\"\n return Response(\n dumps(function(*args, **kwargs)),\n mimetype='application/json'\n )\n return inner",
"def get_body(self, environ=None):\n body = {\n 'success': False,\n 'data': {\n 'code': self.code,\n 'msg': self.msg,\n 'path': request.full_path,\n }\n }\n return json.dumps(body)",
"def json(self):\n class ExtendedJSONEncoder(json.JSONEncoder):\n def default(self, obj):\n if isinstance(obj, datetime.date) or isinstance(obj, datetime.time):\n encoded_object = obj.isoformat()\n else:\n encoded_object = json.JSONEncoder.default(self, obj)\n return encoded_object\n\n obj = {\n 'operation': self.operation,\n 'version': self.version,\n 'language': self.language,\n 'identifiers': self.identifiers,\n 'store_execute': self.store_execute,\n 'status': self.status,\n 'lineage': self.lineage,\n 'inputs': dict((i, [inpt.json for inpt in self.inputs[i]]) for i in self.inputs),\n 'outputs': self.outputs,\n 'raw': self.raw\n }\n\n return json.dumps(obj, allow_nan=False, cls=ExtendedJSONEncoder)",
"def dump_json(request, obj):\n return obj",
"def to_json(self):\n json_request = {}\n\n if self._course_id:\n json_request['scope'] = {\n 'typeName': 'courseContext',\n 'definition': {\n 'courseId': self._course_id\n }}\n elif self._partner_id:\n json_request['scope'] = {\n 'typeName': 'partnerContext',\n 'definition': {\n 'partnerId': {\n 'maestroId': self._partner_id\n }}}\n elif self._group_id:\n json_request['scope'] = {\n 'typeName': 'groupContext',\n 'definition': {\n 'groupId': self._group_id\n }}\n if self._export_type:\n json_request['exportType'] = self._export_type\n if self._anonymity_level:\n json_request['anonymityLevel'] = self._anonymity_level\n if self._statement_of_purpose:\n json_request['statementOfPurpose'] = self._statement_of_purpose\n if self._schema_names:\n json_request['schemaNames'] = self._schema_names\n if self._interval:\n json_request['interval'] = {\n 'start': self._interval[0], 'end': self._interval[1]}\n if self._ignore_existing:\n json_request['ignoreExisting'] = self._ignore_existing\n\n return json_request",
"def jsonify(self):\n jsonObject = self.getJsonObject()\n return json.dumps(jsonObject)",
"def json_view(self, recursive=False):\n\n context = self.context.aq_inner\n data = self.export(context, recursive=recursive)\n pretty = json.dumps(data, sort_keys=True, indent=4)\n self.request.response.setHeader(\"Content-type\", \"application/json\")\n return pretty",
"def to_lambda_output(self):\n resp = {\n 'statusCode': self.status_code,\n 'body': self.body,\n 'headers': self.headers\n }\n\n return resp",
"def _writeJSONResponse(result, request, code=CODE.SUCCESS, status=http.OK):\n response = {\n u'code': code.value,\n u'result': result}\n request.setHeader('content-type', 'application/json')\n request.setResponseCode(status)\n request.write(json.dumps(response))\n request.finish()",
"def json_response(self, response_data):\n resp_obj = json_dumps(response_data, default=encode_ion_object, indent=None if request.is_xhr else 2)\n resp = self.response_class(resp_obj, mimetype=CONT_TYPE_JSON)\n if self.develop_mode and (self.set_cors_headers or (\"api_key\" in request.args and request.args[\"api_key\"])):\n self._add_cors_headers(resp)\n self._log_request_response(CONT_TYPE_JSON, resp_obj, len(resp_obj))\n return resp",
"def copy_response(context):\n context.response_copy = context.response.json()\n logging.debug('Successfully copied the response')",
"def get(self):\n self.finish(json.dumps(self.build_response_dict()))",
"def to_response(self):\n raise NotImplementedError(\"Must define to_response on `%s`\" % self.__class__.__name__)",
"def to_response(self):\n return make_response(self.res, self.status)",
"def to_response(self):\n return make_response(self.res, self.status)",
"def output_json(data, code, headers=None):\n response = make_response(json.dumps(data), code)\n response.headers.extend(headers or {})\n return response",
"def _create_response_json(data):\n r = make_response(json.dumps(data, ensure_ascii=False, default=json_util.default))\n r.headers['Access-Control-Allow-Origin'] = \"*\"\n r.headers['Content-Type'] = \"application/json; charset=utf-8\"\n return r"
] | [
"0.7898263",
"0.7824009",
"0.76766837",
"0.76766837",
"0.7560496",
"0.75058556",
"0.74115",
"0.74115",
"0.74115",
"0.74115",
"0.74115",
"0.73821956",
"0.73662317",
"0.7348336",
"0.7309291",
"0.7130906",
"0.6941217",
"0.6892298",
"0.6722085",
"0.663044",
"0.663044",
"0.663044",
"0.663044",
"0.6624278",
"0.66101027",
"0.65830445",
"0.64999884",
"0.63533854",
"0.6276619",
"0.6244942",
"0.62242484",
"0.6185351",
"0.61124665",
"0.6095673",
"0.6079476",
"0.60646486",
"0.6040115",
"0.60369045",
"0.6030418",
"0.602617",
"0.59651273",
"0.59499395",
"0.5935385",
"0.5922817",
"0.5922817",
"0.5909626",
"0.58728576",
"0.5866613",
"0.5858048",
"0.581373",
"0.580704",
"0.5790782",
"0.57808477",
"0.5771479",
"0.5767885",
"0.5765478",
"0.5764835",
"0.5750403",
"0.57366604",
"0.573276",
"0.5710533",
"0.5709495",
"0.5698101",
"0.5691809",
"0.56745875",
"0.56657904",
"0.5650932",
"0.5644281",
"0.56356055",
"0.5634355",
"0.56320304",
"0.56313527",
"0.56293684",
"0.56206036",
"0.56134605",
"0.5596078",
"0.5585305",
"0.5581392",
"0.55755895",
"0.55665374",
"0.55564475",
"0.5553354",
"0.5552204",
"0.5552147",
"0.5547425",
"0.5542024",
"0.5534814",
"0.5525277",
"0.55243635",
"0.55157703",
"0.5512581",
"0.5495652",
"0.5493998",
"0.54913086",
"0.54897416",
"0.5483464",
"0.5480061",
"0.5480061",
"0.5466442",
"0.54654056"
] | 0.7347456 | 14 |
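Aside: the record above is a Django class-based-view mixin method. A hedged sketch of how `render_to_json_response` might be mounted on a view (the `PingView` class and the stub `safe_json` are assumptions for illustration; the record's real `safe_json` appears in the next row):

from django.http import JsonResponse
from django.views.generic import View

class JSONResponseMixin:
    def render_to_json_response(self, context, **response_kwargs):
        # ensure_ascii=False keeps non-ASCII payload text readable.
        response_kwargs.update(dict(json_dumps_params=dict(ensure_ascii=False)))
        return JsonResponse(self.safe_json(context), **response_kwargs)

    def safe_json(self, context):
        # Stub for illustration only; a fuller version follows in the next record.
        return dict(success=True, data=context)

class PingView(JSONResponseMixin, View):
    def get(self, request):
        return self.render_to_json_response({'message': 'pong'})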
Returns an object that will be serialized as JSON by json.dumps(). | def safe_json(self, context):
serialize_context = dict()
for key, obj in context.items():
if isinstance(obj.__class__, ModelBase):
if hasattr(obj, 'serialize') and callable(getattr(obj, 'serialize')):
serialize_context[key] = obj.serialize()
else:
serialize_context[key] = model_to_dict(obj)
elif isinstance(obj, QuerySet):
serialize_context[key] = [o.serialize() for o in obj if hasattr(o, 'serialize')]
if len(serialize_context[key]) != len(obj):
serialize_context[key] = [model_to_dict(o) for o in obj]
elif key == 'extra':
serialize_context[key] = obj
# elif key == 'view':
# continue
# else:
# serialize_context[key] = obj
return dict(success=True, data=serialize_context) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def toJSON(cls, obj):\n return json.dumps(obj)",
"def serialize(self, obj):\n return json.dumps(obj)",
"def toJSON(self):\n return json.dumps(self, default=lambda o: o.__dict__)",
"def to_json(self, *args, **kwargs):\n return json.dumps(self.serialize(primitive=True), *args, **kwargs)",
"def jsonify(self):\n jsonObject = self.getJsonObject()\n return json.dumps(jsonObject)",
"def to_json(self, *args, **kwargs):\n return json.dumps(self.serialize(), *args, **kwargs)",
"def jsonify(obj):\n return json.loads(json.dumps(obj, default=default_encoder))",
"def to_json(self):\n return json.dumps(self._asdict())",
"def serialize(cls, obj):\n return json.dumps(obj, cls=CustomTypeEncoder)",
"def json_encode(obj):\n return json.dumps(obj)",
"def encode_json(obj):\n\treturn json.dumps(obj)",
"def json_serialize(self):\n raise NotImplementedError('json_serialize must be overriden')",
"def serialize(self):\n return json.dumps(self.as_dict())",
"def toJSON(object):\n\treturn json.dumps(object, ensure_ascii=False)",
"def json(self):\n return json.dumps(self, default=lambda o: o.__dict__,\n sort_keys=True, indent=4)",
"def json (self):\n\n return jsonpickle.encode(self, unpicklable=False)",
"def json (self):\n\n return jsonpickle.encode(self, unpicklable=False)",
"def jsonify(obj):\n raise NotImplementedError",
"def json(self):\n class ExtendedJSONEncoder(json.JSONEncoder):\n def default(self, obj):\n if isinstance(obj, datetime.date) or isinstance(obj, datetime.time):\n encoded_object = obj.isoformat()\n else:\n encoded_object = json.JSONEncoder.default(self, obj)\n return encoded_object\n\n obj = {\n 'operation': self.operation,\n 'version': self.version,\n 'language': self.language,\n 'identifiers': self.identifiers,\n 'store_execute': self.store_execute,\n 'status': self.status,\n 'lineage': self.lineage,\n 'inputs': dict((i, [inpt.json for inpt in self.inputs[i]]) for i in self.inputs),\n 'outputs': self.outputs,\n 'raw': self.raw\n }\n\n return json.dumps(obj, allow_nan=False, cls=ExtendedJSONEncoder)",
"def dump_to_json (self):\n return json.loads(self.dump())",
"def _toJSON(self):\n\n return json.encode(self.__toJSON())",
"def to_json(self) -> JSON:\n pass",
"def GetJSON(self):\n return json.dumps(self.GetDict())",
"def to_json(self, *args, **kwargs):\n data = self.to_dict()\n\n return json_util.dumps(data)",
"def EventToJSON(_object):\n return json.dumps(_object, default=jsonDefault)",
"def serialize(self, as_str=True) -> Union[dict, str]:\n response = self.to_obj()\n\n if as_str:\n response = json.dumps(response)\n\n return response",
"def to_json(self):\n return json.dumps(self, default=lambda i: i.__dict__)",
"def as_json(self):",
"def cls2json(self):\n return json.dumps(self.__dict__)",
"def cls2json(self):\n return json.dumps(self.__dict__)",
"def to_json(self, **kwargs):\n return dumps(self, **kwargs)",
"def to_json(self) :\n return jsonpickle.encode(self)",
"def to_json(self):\n return json.dumps(self.serialize())",
"def jsonify(object):\n # note: ng provides a \"json\" filter that can do this too\n # note: but Django doesn't [https://code.djangoproject.com/ticket/17419]\n if isinstance(object, QuerySet):\n return serialize('json', object)\n return json.dumps(object)",
"def serialize(self) -> str:\n return json.dumps(self.__dict__)",
"def jsonDefault(object):\n return object.__dict__",
"def serialize(self) -> bytes:\n return json_dumps(self._to_dict()).encode()",
"def serialize(self) -> bytes:\n return json_dumps(self._to_dict()).encode()",
"def serialize(self) -> bytes:\n return json_dumps(self._to_dict()).encode()",
"def toJson(self):\r\n return self.__dict__",
"def to_json(self):\n return json.dumps(self.for_json())",
"def to_json(self):\n return json.dumps(self.__dict__)",
"def to_json_string(my_obj):\n obj = j.dumps(my_obj)\n return obj",
"def to_init_json(self) -> JSON:\n pass",
"def tojson(self):\n return json.dumps(self.jsonable())",
"def json_encode(obj, **kwargs):\n return json.dumps(obj, default=json_default, **kwargs)",
"def as_json(self):\n return json.dumps(self.as_dict())",
"def as_json(self):\n return json.dumps(self.as_dict())",
"def as_json(self):\n return json.dumps(self.as_dict())",
"def SerializeObject(self, data):\n\n if isinstance(data,dict):\n serializad_data = json.dumps(data)\n else:\n serializad_data = json.dumps(data.__dict__)\n\n return serializad_data",
"def to_json(self):\n obj_dict = self.__dict__\n obj_dict[\"_class_\"] = self.__class__.__name__\n return obj_dict",
"def jsonify(obj):\n return Response(json.dumps(obj, cls=MongoJsonEncoder), mimetype='application/json')",
"def jsonify(obj):\n d = model_to_dict(obj)\n return json.dumps(d, cls=LazyEncoder)",
"def convert_to_json(self):\n return self.__dict__",
"def to_json_string(my_obj):\n return (json.dumps(my_obj))",
"def to_json_string(my_obj):\n return (json.dumps(my_obj))",
"def json(self) -> Any:\n return json.loads(self)",
"def tojson(self) -> ty.Text:\n return json.dumps(self.todict())",
"def to_json(self) -> str:\n return json.dumps(asdict(self))",
"def json_friendly(self):",
"def to_json_string(my_obj):\n return json.dumps(my_obj)",
"def to_json_string(my_obj):\n return json.dumps(my_obj)",
"def to_json_string(my_obj):\n return json.dumps(my_obj)",
"def to_json_string(my_obj):\n return json.dumps(my_obj)",
"def to_json(self):\n return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)",
"def to_json(self):\n return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)",
"def to_json(self):\n return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)",
"def to_json_string(my_obj):\n j_obj = json.dumps(my_obj)\n return j_obj",
"def toJson(self):\n return json.dumps(self.toDict())",
"def toJson(self):\n return json.dumps(self.toDict())",
"def to_string(self):\n return json.dumps(self.to_json(), cls=ObjectEncoder)",
"def json(self, pretty=True, full_dump=True):\n return json_dump(self, pretty, full_dump)",
"def jsonizable_object(self):\n obj = {\n 'title': self.title,\n 'url': self.url,\n 'abstract': self.abstract\n }\n if self.metadata:\n obj['metadata'] = self.metadata\n return obj",
"def get_json_response(obj):\n return HttpResponse(json.dumps(obj))",
"def get_json_response(obj):\n return HttpResponse(json.dumps(obj))",
"def json(self, data):\n import json\n data = json.dumps(data)\n return data",
"def json_encode(data):\n return json.dumps(data, cls=JSONEncoder)",
"def to_json_string(my_obj):\n\n return json.dumps(my_obj)",
"def to_json(self):\n return json.dumps(self.to_dict())",
"def to_json(self):\n return json.dumps(self.to_dict())",
"def to_json(self):\n return json.dumps(self.dict)",
"def json(self):\n return self.__json",
"def to_json(self):\n return json.dumps(self, default=json_converter, indent=2)",
"def pack(self, obj):\n # TODO: use a JSON encoder that handles more types?\n if obj is not None:\n return json.dumps(obj)",
"def to_json(self):\n return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True,\n indent=4)",
"def to_json(self):\n return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True,\n indent=4)",
"def toJSON(self):\n\t\treturn json.dumps(self.root, default=lambda o: o.__dict__)",
"def to_json(self):\n return self.__dict__",
"def to_json(self):\n return self.__dict__",
"def to_json(self):\n return self.__dict__",
"def to_json(self):\n return self.__dict__",
"def to_json(self):\n pass",
"def __toJSON(self):\n context = jsonpickle.pickler.Pickler(unpicklable=True, warn=True, keys=True)\n json_state = context.flatten(self, reset=True)\n\n jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)\n\n # Add version and hash of dictionary json_state\n json_state[\"__version__\"] = self.__version__\n json_state[\"__sha256__\"] = hash_sha256(json.encode(json_state))\n\n # Add user and datetime information afterwards\n json_state[\"__user__\"] = getpass.getuser()\n dt = datetime.now()\n json_state[\"__datetime__\"] = dt.strftime(\"%A, %d. %B %Y %I:%M%p\")\n\n return json_state",
"def toJson(self):\n return json.dumps(self.toDict(), default=str)",
"def __str__(self):\n return json.dumps(self.to_dict())",
"def serialize(self):\n\n if util.config['redfish']['indent_json']:\n indent = 4\n else:\n indent = None\n return json.dumps(\n self.redfish,\n default=lambda o: o.__dict__,\n sort_keys=False,\n indent=indent)",
"def toJSON(self):\n raise NotImplementedError()",
"def __json_init__(cls, **kwargs):\n return cls(**kwargs)",
"def jsonify(obj, encoding=True):\n if encoding:\n return ujson.dumps(obj, ensure_ascii=False).encode('utf8')\n else:\n return ujson.dumps(obj, ensure_ascii=False)",
"def tojson(python_object):\n return json.JSONEncoder().encode(python_object)",
"def dump_json(request, obj):\n return obj"
] | [
"0.78144574",
"0.7662045",
"0.76151204",
"0.74891126",
"0.7480223",
"0.7432419",
"0.7416268",
"0.7400361",
"0.7388459",
"0.7362635",
"0.73541415",
"0.73291653",
"0.7320688",
"0.7310187",
"0.73080957",
"0.727898",
"0.727898",
"0.72756785",
"0.7259399",
"0.72041607",
"0.7153282",
"0.7138577",
"0.7133007",
"0.7115211",
"0.7110991",
"0.7082766",
"0.70749503",
"0.70564175",
"0.70442826",
"0.70442826",
"0.7042689",
"0.7031036",
"0.70136684",
"0.6993191",
"0.69715023",
"0.6967144",
"0.6963163",
"0.6963163",
"0.6963163",
"0.6959457",
"0.6956443",
"0.69524986",
"0.6950706",
"0.69328463",
"0.6924649",
"0.69214356",
"0.6916199",
"0.6916199",
"0.6916199",
"0.6911611",
"0.6905824",
"0.68983775",
"0.6894148",
"0.6890375",
"0.688718",
"0.688718",
"0.6874332",
"0.68705595",
"0.6852178",
"0.6815356",
"0.68117064",
"0.68117064",
"0.68117064",
"0.68117064",
"0.68050677",
"0.68050677",
"0.68050677",
"0.67882663",
"0.67864853",
"0.67864853",
"0.6785313",
"0.6783742",
"0.6778085",
"0.6777218",
"0.6777218",
"0.67766005",
"0.6769682",
"0.67588097",
"0.67550945",
"0.67550945",
"0.6753328",
"0.67481405",
"0.67479336",
"0.67413783",
"0.67376965",
"0.67376965",
"0.67338777",
"0.67236453",
"0.67236453",
"0.67236453",
"0.67236453",
"0.6717411",
"0.67060196",
"0.6699564",
"0.6682987",
"0.66780543",
"0.66655743",
"0.6654078",
"0.6647193",
"0.6635505",
"0.66232985"
] | 0.0 | -1 |
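Aside: the `safe_json` in the record above omits its imports. A self-contained restatement of the same logic with the Django imports made explicit — the checks are as in the record: a metaclass test for model instances, the object's own `serialize()` when available, and `model_to_dict()` as the fallback:

from django.db.models.base import ModelBase
from django.db.models.query import QuerySet
from django.forms.models import model_to_dict

def safe_json(context):
    data = {}
    for key, obj in context.items():
        # A model instance: its class was built by the ModelBase metaclass.
        if isinstance(obj.__class__, ModelBase):
            serializer = getattr(obj, 'serialize', None)
            data[key] = serializer() if callable(serializer) else model_to_dict(obj)
        elif isinstance(obj, QuerySet):
            data[key] = [o.serialize() for o in obj if hasattr(o, 'serialize')]
            # Fall back to model_to_dict if any row lacked serialize().
            if len(data[key]) != len(obj):
                data[key] = [model_to_dict(o) for o in obj]
        elif key == 'extra':
            data[key] = obj
    return dict(success=True, data=data)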
Return the error as a JSON response. | def throw_error(self, error, status_code=400, **extra):
data = dict(success=False, data=dict(message=error, **extra))
raise ShortCircuitHttpChain(response=JsonResponse(data, status=status_code)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def json_error(message):\n return json_response(isError=True, message=message)",
"def handle_error(error):\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response",
"def jsonify_exception(error: HTTPException) -> Response:\n exc_resp = error.get_response()\n response: Response = jsonify(reason=error.description)\n response.status_code = exc_resp.status_code\n return response",
"def http_exception(error):\n data = {'error': str(error)}\n return app.response_class(\n response=json.dumps(data),\n status=error.code,\n mimetype='application/json'\n )",
"def return_json_error(msg, status_code):\n return Response(response=json.dumps({'message': str(msg)}), status=status_code, mimetype=\"application/json\")",
"def _Error(message):\n return json.dumps({\n 'success': False,\n 'error': message,\n })",
"def json_err(msg: str) -> Response:\n return jsonify({\"success\": False, \"error\": msg})",
"def make_json_error(ex):\n if isinstance(ex, HTTPException):\n return ex;\n elif isinstance(ex, ResourceException):\n info = ex.to_dict()\n status_code = ex.http_status\n info[\"type\"] = \"exception\"\n else:\n message = \"There was an internal server error. Please try again later.\"\n info = {\"code\": \"internal_server_error\", \"message\": message, \"type\": \"exception\"}\n status_code = 500\n # generally we should log these 500 errors with the stacktrace somewhere -- we used splunk at Box.\n\n response = jsonify(**info)\n response.status_code = status_code\n return response",
"def auth_error(error):\n return jsonify(error.error), error.status_code",
"def internal_server_error(error_msg):\n return jsonify(error=str(error_msg))",
"def errorResponse(errormessage, format, extraJSON={}): \n \n if format == 'csv':\n return CSVResponse(\n [{'errormessage': errormessage}],\n fields=('errormessage',) )\n \n else:\n json_objects = extraJSON.copy()\n json_objects['error'] = True\n json_objects['errormessage'] = errormessage\n return JSONResponse(json_objects)",
"def response_error(error, status=400):\n\n response = {\n 'status': 'failed',\n 'error': error\n }\n\n return response_json(response, status=400)",
"def AsJson(self):\n\n return json.dumps(self._errors)",
"def error_response(error_text):\n return Response(json.dumps({'error' : error_text}), status=404, mimetype='application/json')",
"def _err_response(msg):\r\n return HttpResponse(json.dumps({'success': False, 'error': msg}),\r\n mimetype=\"application/json\")",
"def generic_errors(error, code):\n errors = {}\n errors[\"error\"] = error\n response = jsonify(errors)\n response.status_code = code\n return response",
"def handle_invalid_usage(error):\n\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response",
"def internal_error(error):\n return jsonify({'error': \"Internal Server Error. \"\n \"Bitte die Logdatei für Details anschauen.\"}), 500",
"def handle_api_exception(error):\n response = flask.jsonify(error.to_dict())\n response.status_code = error.status_code\n return response",
"def server_error(error=None):\n return jsonify({\n 'Error': 'Check if the request causes a server error'\n }), 500",
"def handle_invalid_usage(error):\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response",
"def handle_invalid_usage(error):\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response",
"def handle_invalid_usage(error):\n logging.warn(error.message)\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response",
"def handle_error(self, message):\n data = {\n \"success\": False,\n \"error\": message\n }\n\n return JsonResponse(data, status=200)",
"def error_return(content, status):\n content = '{' + '\"status\":{},\"message\":\"{}\"'.format(status, content) + '}'\n return Response(content, status=status, mimetype='application/json')",
"def error_response(status_code, message=None):\n payload = {'error': str(status_code)+\" : \"+HTTP_STATUS_CODES.get(status_code, \"Unknown Error\")}\n if message:\n payload['message'] = message\n response = jsonify(payload)\n response.status_code = status_code\n return response",
"def errorResponse(self):\n return self._errorResponse",
"def internal_server_error(error):\n return jsonify({\n 'success': False,\n 'error': STATUS_INTERNAL_SERVER_ERROR,\n 'message': ERROR_MESSAGES[STATUS_INTERNAL_SERVER_ERROR]\n }), STATUS_INTERNAL_SERVER_ERROR",
"def handle_exception(e):\r\n # start with the correct headers and status code from the error\r\n response = e.get_response()\r\n # replace the body with JSON\r\n response.data = json.dumps({\r\n \"code\": e.code,\r\n \"name\": e.name,\r\n \"description\": e.description,\r\n })\r\n response.content_type = \"application/json\"\r\n return response",
"def _error_response(self):\r\n response_dict = {'success': False, 'version': 1}\r\n self.send_response(\r\n 400, content=json.dumps(response_dict),\r\n headers={'Content-type': 'application/json'}\r\n )",
"def error(\n status=500,\n message=\"Internal Server Error\"\n):\n return make_response(\n jsonify(error=message),\n status,\n )",
"def _writeJSONErrorResponse(f, request):\n code = getattr(f.value, 'code', CODE.UNKNOWN)\n _writeJSONResponse(\n result=f.getErrorMessage().decode('ascii'),\n request=request,\n code=code,\n status=_mapErrorCodeToStatus(code))\n raise f",
"def on_response_validation_error(err):\n return jsonify(message='Bad response'), 500",
"def application_error(e):\n message = {\n 'status': 500,\n 'message': 'Sorry, unexpected error: ' + format(e)\n }\n resp = jsonify(message)\n resp.status_code = 500\n\n return resp",
"def handle_exception(e):\r\n # start with the correct headers and status code from the error\r\n response = e.get_response()\r\n # replace the body with JSON\r\n response.data = json.dumps({\r\n \"code\": e.code,\r\n \"name\": e.name,\r\n \"description\": e.description,\r\n })\r\n response.content_type = \"application/json\"\r\n return response",
"def service_errors(error):\r\n\r\n response = {'error': {'message': error.message, 'code': error.status_code}}\r\n\r\n return jsonify(response), error.status_code",
"def internal_server_error(error):\n return flask.jsonify({\"error\": \"Internal Server Error\"}), 500",
"def handle_exception(error):\n return make_response(jsonify({'message': error.description}), 400)",
"def response(self):\r\n error = unicode(self)\r\n return HttpResponseBadRequest(json.dumps({'error': error}))",
"def get_http_error_response_json(error_title, error_key, error_value):\n http_error_resp = {}\n http_error_resp[\"errors\"] = []\n http_error_resp[\"errors\"].append(\n {\"error\": error_title, \"error_values\": [{error_key: error_value}]}\n )\n return json.dumps(http_error_resp)",
"def _rest_error(self, status_code, error_code, message):\n return {\"status_code\": status_code, \"error_code\": error_code, \"message\": message}",
"def jsonify_http_exception(exception: HTTPException):\n return jsonify(exception.description, exception.code)",
"def handle_exception(e):\n # start with the correct headers and status code from the error\n response = e.get_response()\n # replace the body with JSON\n response.data = json.dumps({\n \"code\": e.code,\n \"name\": e.name,\n \"description\": e.description,\n })\n response.content_type = \"application/json\"\n return response",
"def handle_root_exception(error):\n code = 400\n if hasattr(error, 'code'):\n code = error.code\n d = dict(_error=str(error))\n s = json.dumps(d)\n return (s, code, [('Content-Type', 'application/json')])",
"def response_from_error(error_code, error_message=None):\n\terror = Error(error_code, error_message).__dict__\n\terror_response_code = error['response_code']\n\treturn Response(json.dumps(error), status=error_response_code, mimetype='application/json')",
"def error(self, http_error):\n return HTTPResponse(str(http_error), status=http_error.status)",
"def handle_exception(e):\n # start with the correct headers and status code from the error\n response = e.get_response()\n # replace the body with JSON\n response.data = json.dumps({\n \"code\": e.code,\n \"name\": e.name,\n \"description\": e.description,\n })\n print(response.data)\n response.content_type = \"application/json\"\n return response",
"def iftttError(code, error):\n return {\n \"statusCode\": code,\n \"body\": json.dumps({\n \"errors\": [\n {\n \"message\":error\n }\n ],\n }),\n }",
"def error(self):\n error = self._wrapped.error\n if error:\n return error\n\n return self.json['response'].get('error')",
"def to_response_data(self) -> typing.Any:\n v = self.value or {}\n error_code = v.get(\"code\", \"GenericLobotomyError\")\n error_message = v.get(\"message\", \"There was an error.\")\n return {\"Error\": {\"Code\": error_code, \"Message\": error_message}}",
"def return_request_error(error_message: str, http_status_code: int, response: Response):\n response.status_code = http_status_code\n return {\n 'error': error_message\n }",
"def _err_response(self, msg):\r\n return {'success': False, 'error': msg}",
"def serialize_error(success, object, reason):\n\n return json.dumps({\"success\": success, \"object\": object, \"status\": reason}, indent=2, sort_keys=True)",
"def exceptionhandler(e):\n response = e.get_response()\n response.data = json.dumps({\n \"code\" : e.code,\n \"name\": e.name,\n \"description\": e.description\n })\n response.content_type = \"application/json\"\n\n return response",
"def write_error(self, status_code, exc_info, **kwargs):\n response = {\n \"data\": None,\n \"errors\": [ str(exc_info[1]) ]\n }\n\n self.set_status(status_code)\n self.write(json.dumps(response))",
"def make_error_response(status, error):\n return dict(status=status, error=str(error))",
"def internal_server_error(error): # pylint: disable=unused-argument\n response = jsonify(\n {\n \"success\": False,\n \"error_code\": 500,\n \"message\": \"Internal Server Error\",\n }\n )\n return response, 500",
"def internal_error(error):\n return jsonify(error='configuration could not be generated')",
"def error_response(msg='Unknown'):\n return \"\"\"{{\"InternalServerError\":\"{}\"}}\"\"\".format(msg)",
"def internal_error(error):\n return f'{\"code\": 500, \"message\": \"{str(error)}\"}', 500",
"def resource_bad_request(error_msg):\n return jsonify(error=str(error_msg))",
"def handle_uncaught_error(e):\n status_code = 500\n\n result = {\n \"error_message\": \"Unknown or unexpected error.\",\n \"error_code\": \"INTERNAL_SERVER_ERROR\"\n }\n return jsonify(result), status_code",
"def __get_response_error(message, response):\n\n rjson = response.json()\n error_description = \"Code %s - %s\" %(str(response.status_code), rjson.get('message'))\n\n return {\n 'app_message': \"%s\" % (message),\n 'error_description': \"[%s] - %s\" % (message, error_description),\n 'code': response.status_code\n }",
"def error_json(self, number=None, payload=None):\n try:\n spayload = json.dumps(payload)\n # spayload = payload.replace('\\\"','').replace('\\'','')\n except Exception:\n spayload = '\"\"'\n\n vals = (error_codes[number], str(number), spayload)\n self.debug(\"ERROR %s - %s - payload: %s\", *vals)\n\n return json.loads('{ \"Error\":\"%s\", \"Err\":\"%s\", \"Payload\":%s }' % vals)",
"def write_error(self, status_code, **kwargs):\n reason = \"Unknown Error\"\n\n # Get information about the triggered exception\n self.application.gs_globals[\"exception_fulltext\"] = repr(sys.exc_info())\n\n # Get the status code and error reason\n if status_code in list(ERROR_CODES):\n reason = ERROR_CODES[status_code]\n try:\n if \"exc_info\" in kwargs:\n _, error, _ = kwargs[\"exc_info\"]\n reason = error.reason\n except AttributeError:\n pass\n\n # Return JSON if this is an API call\n if \"/api/v1/\" in self.request.uri:\n jsondict = {\n \"page_title\": \"Error {}: {}\".format(status_code, reason),\n \"error_status\": status_code,\n \"error_reason\": reason,\n \"error_exception\": self.application.gs_globals[\"exception_fulltext\"],\n }\n self.set_header(\"Content-type\", \"application/json\")\n self.write(json.dumps(jsondict))\n\n # Render the error template\n else:\n t = self.application.loader.load(\"error_page.html\")\n self.write(\n t.generate(\n gs_globals=self.application.gs_globals,\n status=status_code,\n reason=reason,\n user=self.get_current_user(),\n )\n )",
"def _construct_error_response_body(error_type, error_message):\n # OrderedDict is used to make testing in Py2 and Py3 consistent\n return json.dumps(OrderedDict([(\"Type\", error_type), (\"Message\", error_message)]))",
"def render_exception_json(exception_data):\n return json.dumps(exception_data, default=_json_serializer)",
"def error_message(message: str, http_code: int = 400) -> JsonResponse:\n _error_message = {'message': message}\n return JsonResponse(_error_message, json_dumps_params=json_dumps_params, status=http_code)",
"def json_response(content=\"\", status=\"OK\", status_code=200, error=\"\"):\n wrapper = {\n 'status': status,\n 'status_code': status_code,\n 'output': content,\n 'error': error\n }\n response = json.dumps(wrapper, cls=DjangoJSONEncoder, indent=4)\n return HttpResponse(response, content_type='application/json', status=status_code)",
"def bad_request(self, error):\n return jsonify({'error': 'BAD REQUEST'}), 400",
"def json_or_error(response):\n if 200 <= response.status_code < 300:\n if response.content:\n return response.json()\n else:\n # Response has no body. Return a status in a way that is consistent with other requests\n return {\n 'status': 'SUCCESS',\n 'httpStatusCode': response.status_code,\n 'httpStatus': httplib.responses[response.status_code],\n }\n else:\n raise JsonApiError('API request to {} failed with HTTP status {}: {}'.format(\n response.url, response.status_code, response.text))",
"def jsonable_server_error(request, template_name='500.html'):\r\n if request.is_ajax():\r\n msg = {\"error\": \"The edX servers encountered an error\"}\r\n return HttpResponseServerError(json.dumps(msg))\r\n else:\r\n return server_error(request, template_name=template_name)",
"def getError(self):\n \n return self.resp[\"error\"]",
"def internal_error_400(error):\n return jsonify({'error':\n \"Die Anfrage wurde syntaktisch falsch erstellt.\"}), 400",
"def invalid_response():\n return Response(\n '{\"error\": \"Invalid request\"}',\n status=400,\n mimetype='application/json'\n )",
"def process_exception(self, request, exception):\n logging.error(\"ERROR\")\n logging.error(traceback.format_exc())\n response = set_response(\"Internal server error\", False, 500, {})\n return JsonResponse(response, status=response[\"http_code\"])",
"def error_respond(self) -> 'JSONRPCErrorResponse':\n response = JSONRPCErrorResponse()\n\n response.error = self.message\n response.unique_id = self.request_id\n response._jsonrpc_error_code = self.jsonrpc_error_code\n if hasattr(self, 'data'):\n response.data = self.data\n return response",
"def create_error_response(data: Dict[str, str], status_code: int) -> Response:\n resp = jsonify(data)\n resp.status_code = status_code\n return resp",
"def output_message(message):\n output = json.dumps({\"error\": message}, indent=4)\n return output",
"def format_exception(self):\n if isinstance(self.message, dict):\n return self.message, self.status_code\n return Request.format_exception(self.message, self.status_code)",
"def error_response(http_response_code: Union[HTTPStatus, int], message: Text) -> JSONResponse:\n\n if isinstance(http_response_code, HTTPStatus):\n http_response_code = http_response_code.value\n\n return JSONResponse(dict(\n code=str(http_response_code),\n message=message\n ), http_response_code)",
"def _create_error_response(self, error):\n status = error.status\n try:\n body = json.loads(error.body)\n except Exception:\n body = {}\n if status in [403, 429]:\n # Parse differently if the error message came from kong\n errors = [ApiError(None, body.get(Responses.message, None))]\n else:\n errors = [ApiError(err.get(Responses.context, None),\n err.get(Responses.message, None))\n for err in body.get(Responses.errors, {})]\n return ErrorResponse(status, errors, headers=error.headers)",
"def to_error_response(message, errors, status_code=500):\n data = {\n 'message': message,\n 'errors': errors\n }\n\n return Response(data, status_code)",
"def handle_error(self, err): # pragma: no cover\n # log every exception raised in the application\n print('we ended up in the API handle_error()', err, err.__class__)\n\n # catch other HTTP errors\n if isinstance(err, HTTPException):\n original = getattr(err, \"original_exception\", None)\n return jsonify({\n 'success': False,\n 'error': err.code,\n \"message\": getattr(err.error, 'message')\n }), err.code\n\n # if 'message' attribute isn't set, assume it's a core Python exception\n if not getattr(err, 'message', None):\n original = getattr(err, \"original_exception\", None)\n return jsonify({\n 'message': 'Server has encountered an unknown error'\n }), 500\n\n # Handle application-specific custom exceptions\n return jsonify(**err.kwargs), err.http_status_code",
"def not_found(e):\n\n return json.dumps({\"error\": \"Endpoint not found\"})",
"def error_msg(error):\n if request.path.startswith(\"/api\"):\n return jsonify({\"message\": str(error)}), 500\n else:\n return render_template(\"error.html\", message=error), 500",
"def error(self) -> 'outputs.StatusResponse':\n return pulumi.get(self, \"error\")",
"def json(self):\n d = [err.json for err in self.errors]\n return d",
"def unprocessable_entity(error): # pylint: disable=unused-argument\n response = jsonify(\n {\n \"success\": False,\n \"error_code\": 422,\n \"message\": \"Unprocessable Entity\",\n }\n )\n return response, 422",
"def format_response(message, status, message_type=\"error\"):\n return make_response(\n jsonify({message_type: message}),\n status\n )",
"def response_json_error_info(func):\n def wrapper(request):\n try:\n return func(request)\n except Exception as ex:\n return get_json_response({\n \"status\": \"error\",\n \"error_info\": str(ex),\n \"trace_back\": traceback.format_exc()\n })\n\n return wrapper",
"def bad_request(error):\n return jsonify({\n 'success': False,\n 'error': STATUS_BAD_REQUEST,\n 'message': ERROR_MESSAGES[STATUS_BAD_REQUEST]\n }), STATUS_BAD_REQUEST",
"def make_error(status_code, message, sub_code=None, action=None, **kwargs):\n data = {\n 'status': status_code,\n 'message': message,\n }\n if action:\n data['action'] = action\n if sub_code:\n data['sub_code'] = sub_code\n data.update(kwargs)\n response = jsonify(data)\n response.status_code = status_code\n return response",
"def handle_unknown_errors(exc):\n return jsonify(dict(\n traceback=traceback.format_exc(),\n message=str(exc),\n )), 500",
"def gateway_error_response(self, exc):\n if hasattr(exc, \"get_stacks\"):\n # Process potentially multiple stacks.\n full_error, exc_stacks = \"\", exc.get_stacks()\n for i in range(len(exc_stacks)):\n full_error += exc_stacks[i][0] + \"\\n\"\n if i == 0:\n full_error += \"\".join(traceback.format_exception(*sys.exc_info()))\n else:\n entry = ApplicationException.format_stack(exc_stacks[i][1])\n full_error += entry + \"\\n\"\n\n exec_name = exc.__class__.__name__\n else:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n exec_name = exc_type.__name__\n full_error = \"\".join(traceback.format_exception(*sys.exc_info()))\n\n status_code = getattr(exc, \"status_code\", 400)\n if self.log_errors:\n if self.develop_mode:\n if status_code == 401:\n log.warn(\"%s: %s\", exec_name, exc)\n else:\n log.error(full_error)\n else:\n if status_code == 401:\n log.info(\"%s: %s\", exec_name, exc)\n else:\n log.info(full_error)\n\n result = {\n GATEWAY_ERROR_EXCEPTION: exec_name,\n GATEWAY_ERROR_MESSAGE: str(exc.message),\n GATEWAY_ERROR_EXCID: getattr(exc, \"exc_id\", \"\") or \"\"\n }\n if self.develop_mode:\n result[GATEWAY_ERROR_TRACE] = full_error\n\n if RETURN_MIMETYPE_PARAM in request.args:\n return_mimetype = str(request.args[RETURN_MIMETYPE_PARAM])\n return self.response_class(result, mimetype=return_mimetype)\n\n self._log_request_error(result, status_code)\n\n resp = self.json_response({GATEWAY_ERROR: result, GATEWAY_STATUS: status_code})\n # Q: Should HTTP status be the error code of the exception?\n resp.status_code = status_code\n return resp",
"def error_code(self):\n return self.json['response'].get('error_code')",
"def wsgi_tool_error_handler(e):\n status_code = e.code\n result = {\n \"error_message\": e.description,\n \"error_code\": e.name.upper().replace(\" \", \"_\")\n }\n return jsonify(result), status_code",
"def resource_not_found(error_msg):\n return jsonify(error=str(error_msg))",
"def render_to_json_response(self, data: Optional[Dict] = {}, meta: Optional[Dict] = {},\n error: Optional[str] = '', status=HTTPStatus.OK, **response_kwargs):\n response_data = {\"body\": data, \"meta\": meta, \"error\": error}\n return JsonResponse(response_data, status=status, **response_kwargs)",
"def not_found(self, error):\n return jsonify({'error': 'NOT FOUND'}), 404",
"def process_error(self, id, code, error):\n return {\n 'id': id,\n 'version': '1.1',\n 'error': {\n 'name': 'JSONRPCError',\n 'code': code,\n 'message': error,\n },\n }"
] | [
"0.7800289",
"0.77307755",
"0.7713259",
"0.7626865",
"0.7596588",
"0.75559425",
"0.7506786",
"0.74428797",
"0.7394055",
"0.73734087",
"0.7364901",
"0.73521066",
"0.7298466",
"0.72667325",
"0.7261724",
"0.7238622",
"0.7220519",
"0.72151345",
"0.71981364",
"0.7188909",
"0.7185343",
"0.7185343",
"0.7182443",
"0.7104869",
"0.7094352",
"0.7085228",
"0.70829767",
"0.70825493",
"0.7061445",
"0.70613116",
"0.70545423",
"0.7040451",
"0.7032478",
"0.70237225",
"0.7010634",
"0.69967014",
"0.6993156",
"0.6979576",
"0.6968191",
"0.6961554",
"0.69580734",
"0.69533765",
"0.69404066",
"0.6920621",
"0.69077605",
"0.68983763",
"0.68965673",
"0.6885261",
"0.6882929",
"0.68617415",
"0.6852109",
"0.6851645",
"0.6848207",
"0.6825006",
"0.6823644",
"0.68144697",
"0.68092567",
"0.6796736",
"0.6789502",
"0.6760654",
"0.6757212",
"0.6727063",
"0.67148435",
"0.67018926",
"0.664753",
"0.66398996",
"0.66346943",
"0.66326076",
"0.6624961",
"0.66130245",
"0.66103405",
"0.66004074",
"0.65988964",
"0.65943474",
"0.65902585",
"0.65855384",
"0.65771294",
"0.65722394",
"0.657088",
"0.6545173",
"0.6544177",
"0.65258455",
"0.6523655",
"0.6518257",
"0.6502433",
"0.6492964",
"0.64527494",
"0.640611",
"0.64050055",
"0.64040357",
"0.63931304",
"0.6388972",
"0.63873726",
"0.6385003",
"0.6381474",
"0.63720405",
"0.636211",
"0.6321524",
"0.6309832",
"0.63071865",
"0.6300861"
] | 0.0 | -1 |
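Aside: `throw_error` in the record above depends on a `ShortCircuitHttpChain` exception that carries a ready-made response. Neither the exception class nor the middleware that catches it is shown in the record, so the sketch below is an assumption about how such a project might wire it up:

from django.http import JsonResponse

class ShortCircuitHttpChain(Exception):
    """Carries a fully-built response up to middleware (assumed name)."""
    def __init__(self, response):
        super().__init__()
        self.response = response

class ShortCircuitMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        return self.get_response(request)

    def process_exception(self, request, exception):
        # Returning a response here aborts Django's normal exception handling.
        if isinstance(exception, ShortCircuitHttpChain):
            return exception.response
        return None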
Returns a value in a nested associative structure, where `ks` is a sequence of keys. Returns `None`, if the key is not present, or the `default` value, if supplied. | def get_in(d, ks, default=None):
*ks_, last = ks
d_ = d
for k in ks_:
if type(d_) != dict or k not in d_:
return default
d_ = d_[k]
if type(d_) == dict:
return d_.get(last, default)
return default | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_get(d, *ks, **kwargs):\n try:\n res = reduce (lambda acc, k: acc[k], ks, d)\n except (KeyError, TypeError):\n if \"default\" in kwargs:\n return kwargs[\"default\"]\n else:\n t, v, tb = sys.exc_info()\n if t == KeyError:\n msg = \"nested keys {} not found in {}\".format(ks, d)\n else:\n msg = \"nesting of keys {} too is too deep for {}\".format(ks, d)\n raise KeyError, msg, tb\n else:\n return res",
"def _get_default(ddict, key, default):\n if ddict is None or key not in ddict or ddict[key] is None:\n return default\n return ddict[key]",
"def get(self, key, default=None):\n if self.root is not None:\n res = self._get(key, self.root)\n if res:\n return res\n else:\n return default\n return default",
"def get(dd, kk, default=0):\n if kk in dd.keys():\n return dd[kk]\n else:\n return default",
"def get(self, key: str, default=None):\n value = self._get(key)\n\n if value is None:\n return self._parent.get(key, default) if self._parent else default\n\n return value",
"def get(self, key, default=None):\n def find(found_item, _):\n \"\"\" This is the closer function which will be passed to find by key function , if key found than return the value \n otherwise return blanck\"\"\"\n if found_item:\n return found_item[1]\n else:\n return default\n\n return self._find_by_key(key, find)",
"def get(self, key, default=None):\n try:\n val = self[key]\n except KeyError:\n return default\n if val == []:\n return default\n return val",
"def get(self, key: str, default=None) -> Any:\n try:\n return self[key][0]\n except KeyError:\n return default",
"def value(\n self, key: _K = 0, default: t.Optional[object] = None\n ) -> t.Any:\n try:\n index = self.index(key)\n except (IndexError, KeyError):\n return default\n else:\n return self[index]",
"def get(self, keyname: str, default: Optional[Any] = None) -> Any:\n try:\n return self[keyname]\n except KeyError:\n return default",
"def safe_get(root_dict, list_keys, default_value=None):\n if root_dict is None:\n return default_value\n\n if list_keys is None or len(list_keys) == 0:\n return default_value\n\n dict_types = [dict, defaultdict]\n\n curr_obj = root_dict\n for k in list_keys:\n if type(curr_obj) in dict_types and k in curr_obj:\n curr_obj = curr_obj[k]\n else:\n curr_obj = default_value\n break\n\n return curr_obj",
"def get(self, key, default=None):\n try:\n return self.__getitem__(key)\n except ValueError:\n if default is not None:\n return default\n else:\n raise",
"def lookup(my_dict, my_key, default_value=None):\n if my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value",
"def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default",
"def get_default(section, option=\"\"):\n\tif not option:\n\t\tif defaults.has_key(section):\n\t\t\treturn defaults[section]\n\telse:\n\t\tif defaults.has_key(section):\n\t\t\tif defaults[section].has_key(option):\n\t\t\t\treturn defaults[section][option]\n\treturn None",
"def search_up(d, k, *default):\n if k in d:\n return d[k]\n if d.parent is d:\n if len(default):\n return default[0]\n else:\n raise AttributeError(k)\n else:\n return search_up(d.parent, k, *default)",
"def get_value(key, dic, default_dic):\n\n v = dic.get(key)\n\n if v is None:\n if key in default_dic:\n v = default_dic.get(key)\n else:\n print_log_msg(\n 'ERROR', 'get_param', 'key not in default_dic', key\n )\n\n return v",
"def get(self, key, default=None):\n try:\n return self[key]\n except KeyError:\n return default",
"def get(self, key, default=None):\r\n try:\r\n return self[key]\r\n except KeyError:\r\n return default",
"def peek(self, key, default=None):\n try:\n return iter(self.get(key, [])).next()\n except StopIteration:\n return default",
"def search_value(d, key, default=None):\n stack = [iter(d.items())]\n while stack:\n for k, v in stack[-1]:\n if isinstance(v, dict):\n stack.append(iter(v.items()))\n break\n elif k == key:\n return v\n else:\n stack.pop()\n return default",
"def get(self, k, default=None):\n return super(OrderedMultiDict, self).get(k, [default])[-1]",
"def get(self, key, default=None):\n return self[key] if key in self else default",
"def setdefault(self, k, default=_MISSING):\n if not super(OrderedMultiDict, self).__contains__(k):\n self[k] = None if default is _MISSING else default\n return self[k]",
"def getKey(kwargs,key,default=None):\n value = kwarg.get(key,default)\n if value==None:\n return default\n return value",
"def get_in(self, key=None, default=None):\n if key is None:\n raise KeyError(\"'Dict' attribute key can't be empty\")\n key_list = key.strip().split('.')\n data = self\n size = len(key_list)\n for index, k in enumerate(key_list):\n data = data.get(k)\n if index < size-1 and not isinstance(data, dict):\n return default\n return data",
"def deep_get(nested_dict, keys, default=None):\n if not isinstance(nested_dict, dict):\n raise exception.SysinvException(_(\n \"Expected a dictionary, cannot get keys {}.\".format(keys)))\n\n def _reducer(d, key):\n if isinstance(d, dict):\n return d.get(key, default)\n return default\n\n return functools.reduce(_reducer, keys, nested_dict)",
"def getFromDefaults(inDict, inKey, inLastDefault, *args):\n\n if inKey in inDict:\n return inDict[inKey]\n\n for defaultDict in args:\n if inKey in defaultDict:\n return defaultDict[inKey]\n\n return inLastDefault",
"def get(self, target_key: str, default_value: Any = None) -> Any:\n _keys = target_key.split('.')\n iteration = len(_keys)\n if iteration > 1:\n result = None\n counter = 1\n for key_holder in _keys:\n if counter == 1:\n result = self._data.get(key_holder, {})\n elif counter < iteration:\n result = result.get(key_holder, {})\n else:\n result = result.get(key_holder, default_value)\n counter += 1\n return result\n else:\n return self._data.get(_keys[0], default_value)",
"def dict_default(x, key=None):\n if isinstance(x, NoDefault):\n if key is None:\n raise KeyError()\n else:\n raise KeyError(key)\n elif isinstance(x, CallIfDefault):\n return x()\n else:\n return x",
"def get(self, key, default=None):\n if key in self:\n return self[key]\n return default",
"def getfirst(self, key, default=None):\n \n values = self.getlist(key)\n return values[0] if values else default",
"def safely_get_value(dct: Mapping, key: Any,\n default: Union[T, None] = None\n ) -> Union[Any, T]:\n if key in dct:\n return dct[key]\n else:\n return default",
"def get_value_or_default(self, key: Union[str, ConfigurationVariable], default_value: Any) -> Any:\n try:\n return self.get_value(key)\n except Undefined as e:\n logger.debug(e)\n return default_value",
"def lookup(key, default=None):\n def _lookup(mapping):\n return mapping.get(key, default)\n return _lookup",
"def get(self, key, default=None):\n try:\n return self._get(key)\n except Exception:\n return default",
"def get(self, key, default=None):",
"def get(self, ckey, default=None):\n obj = default\n keys = ckey.split('.')\n first = keys[0]\n if self.has_key(first):\n obj = super(DotDict, self).__getitem__(first)\n if first == ckey:\n if isinstance(obj, dict):\n return DotDict(obj)\n else:\n return obj\n if isdictinstance(obj):\n return DotDict(obj).get('.'.join(keys[1:]))\n elif isinstance(obj, list):\n for elem in obj:\n if isdictinstance(elem):\n newobj = elem.get('.'.join(keys[1:]))\n if newobj:\n if isinstance(newobj, dict):\n return DotDict(newobj)\n return newobj\n return obj",
"def get(self, path, default=None):\n keys = path.split(\"/\")\n val = None\n\n for key in keys:\n if val:\n if isinstance(val, list):\n val = [ v.get(key, default) if v else None for v in val]\n else:\n val = val.get(key, default)\n else:\n val = dict.get(self, key, default)\n\n if not val:\n break\n\n return val",
"def my(d,k):\n try:\n return d[k]\n except KeyError:\n return CONFIG_DEFAULTS[k]",
"def get_dict_attrib(in_dict, key, default=None):\n\n try:\n return in_dict[key]\n except KeyError:\n return default",
"def get(self, key: str, default: t.Optional[object] = None) -> t.Any:\n try:\n index = self.__keys.index(str(key))\n except ValueError:\n return default\n if 0 <= index < len(self):\n return self._super_getitem_single(index)\n else:\n return default",
"def deep_get(_dict, keys, default=None):\n keys=keys.split('.')\n def _reducer(d, key):\n if isinstance(d, dict):\n return d.get(key, default)\n return default\n return reduce(_reducer, keys, _dict)",
"def dict_get_first_of(d: dict, key, *opt_keys, return_key: bool = False, **kw):\n knf = KeyNotFound()\n k = key\n v = d.get(key, knf)\n n = len(opt_keys)\n i = 0\n while isinstance(v, KeyNotFound) and i < n:\n k = opt_keys[i]\n v = d.get(k, knf)\n i += 1\n\n if isinstance(v, KeyNotFound):\n if 'default' in kw:\n _def = dict_default(kw['default'])\n if return_key:\n return None, _def\n else:\n return _def\n else:\n raise KeyError('none of the provided keys found in the dict')\n if return_key:\n return k, v\n else:\n return v",
"def deep_get(_dict, keys, default=None):\n keys = keys.split('.')\n\n def _reducer(d, key):\n if isinstance(d, dict):\n return d.get(key, default)\n return default\n return reduce(_reducer, keys, _dict)",
"def get(self, key, default=None, from_global=None, as_tuple=False):\n if from_global is None:\n value = self.get(\n key,\n default=None,\n from_global=False,\n as_tuple=as_tuple\n )\n if (isinstance(value, tuple) and value[0] is None) or value is None:\n value = self.get(\n key,\n default=default,\n from_global=True,\n as_tuple=as_tuple\n )\n return value\n elif from_global:\n return self.get_global(key, default, as_tuple)\n else:\n return self.get_local(key, default, as_tuple)",
"def get(self, key, default=None):\n pass",
"def get(self, name, default=None, _prev=None):\n if _prev is None:\n self._current_chain = [name]\n else:\n self._current_chain.append(name)\n\n if name in self:\n return self[name]\n elif default is not None:\n return default\n elif self._separator in name:\n return self.get(self._parent(name), _prev=name)\n elif self.error_if_missing:\n msg = \"Could not find key in the following inheritance chain:\\n{}\"\n raise KeyError(msg.format('\\n'.join(self._current_chain)))\n else:\n return None",
"def get(self, key, default=None):\n try:\n return self.data[key]\n except KeyError:\n self[key] = default\n return default",
"def get(self, key, default=None):\n try:\n value = self.list[key]\n except KeyError:\n return default\n else:\n return value",
"def get(self, key, default=0):\n try:\n return self[key]\n except KeyError:\n return default",
"def get_key(self, key, default=_MISSING):\n if '.' in key:\n # Nested key of form \"foo.bar\"\n key, remainder = key.split('.', 1)\n if default != _MISSING:\n try:\n value = self[key].get_key(remainder, default)\n except KeyError:\n # subdict exists, but doesn't contain key\n return default\n except AttributeError:\n # key points to non-dict thing, so no get_key attribute\n return default\n else:\n value = self[key].get_key(remainder)\n else:\n # Single, non-nested key of form \"foo\"\n if default != _MISSING:\n return self.get(key, default)\n else:\n return self[key]\n return value",
"def get(aMap, key, default=None):\n\ti, k, v = get_slot(aMap, key, default)\n\treturn v",
"def get_dictionary_default(path):\n if path in defaults_dict.keys():\n return defaults_dict[path]\n else:\n return ''",
"def get(self, key, default=None):\n return self._d.get(key, default)",
"def get(\n self,\n key: ir.Value,\n default: ir.Value | None = None,\n ) -> ir.Value:\n\n return ops.MapGet(self, key, default).to_expr()",
"def get_values_by_keys(k: list, default=None)->Callable[[dict], list]:\n return lambda d: list(map(lambda key: d.get(key, default), k))",
"def get(self, keys_path: str, default=None):\n keys = keys_path.strip().split(\"/\")\n if len(keys) < 1:\n return default\n\n result = self._inner\n for key in keys:\n if not result:\n return default\n\n result = result.get(key)\n\n return result",
"def get_default(self, stmt, key):\r\n default = None\r\n if key in self._values:\r\n default = self._values[key]\r\n return stmt.get(key, default)",
"def _resolve_with_default(\n self,\n key: Union[str, int, Enum],\n value: Any,\n default_value: Any = DEFAULT_VALUE_MARKER,\n ) -> Any:\n\n def is_mandatory_missing(val: Any) -> bool:\n return get_value_kind(val) == ValueKind.MANDATORY_MISSING # type: ignore\n\n value = _get_value(value)\n has_default = default_value is not DEFAULT_VALUE_MARKER\n if has_default and (value is None or is_mandatory_missing(value)):\n return default_value\n\n resolved = self._resolve_interpolation(\n key=key,\n value=value,\n throw_on_missing=not has_default,\n throw_on_resolution_failure=not has_default,\n )\n if resolved is None and has_default:\n return default_value\n\n if is_mandatory_missing(resolved):\n if has_default:\n return default_value\n else:\n raise MissingMandatoryValue(\"Missing mandatory value: $FULL_KEY\")\n\n return _get_value(resolved)",
"def get(aMap, key, default=None):\n\ti, k, v = get_slot(aMap, key, default=default)\n\treturn v",
"def safe_get(self,section,key,default_value=None):\n try:\n return self.get(section,key)\n except:\n return default_value",
"def search(d, key, default=None):\n stack = [iter(d.items())]\n while stack:\n for k, v in stack[-1]:\n if isinstance(v, dict):\n stack.append(iter(v.items()))\n break\n elif k == key:\n return v\n else:\n stack.pop()\n return default",
"def get(key, default=None):\n config = _get_config_dict()\n return config.get(key, default)",
"def get(\n self, key: str, default: Optional[Any] = None, version: Optional[int] = None\n ) -> Optional[Any]:\n return default",
"def setdefault(self, k, d):\n try:\n return self[k]\n except KeyError:\n self[k] = d\n return d",
"def nested_get(input_dict, nested_key):\n internal_dict_value = input_dict\n for k in nested_key:\n internal_dict_value = internal_dict_value.get(k, None)\n if internal_dict_value is None:\n return(None)\n return(internal_dict_value)",
"def nested_get(input_dict, nested_key):\n internal_dict_value = input_dict\n for k in nested_key:\n internal_dict_value = internal_dict_value.get(k, None)\n if internal_dict_value is None:\n return(None)\n return(internal_dict_value)",
"def _get_default_value(type_name, is_simple, is_iterative, is_required):\n # Iterables: convert via pre-defined mappings.\n if is_iterative:\n if is_required:\n return _get_iterative_default_value()\n else:\n return _get_iterative_null_value()\n # Simple types: convert via pre-defined mappings.\n elif is_simple:\n if is_required:\n return _get_simple_default_value(type_name)\n else:\n return _get_simple_null_value(type_name)\n # Complex types: convert via pre-defined mappings.\n else:\n if is_required:\n return _get_complex_default_value(type_name)\n else:\n return _get_complex_null_value(type_name)",
"def find(root, path, default_value=None):\n if root is None or path is None:\n return default_value\n\n assert isinstance(path, str)\n tokens = path.split('.')\n for token in tokens:\n root = root.get(token)\n if root is None:\n return default_value\n\n return root",
"def do_attrs(d, *ks, **kwargs):\n if not isinstance(d, dict):\n raise TypeError(\"expected dict as first argument, \"\n \"got {}\".format(type(d)))\n if len(ks) == 1 and isinstance(ks[0], collections.Iterable):\n ks = ks[0]\n if \"default\" in kwargs:\n default = kwargs['default']\n ret_val = map(lambda kw: d.get(kw, default), ks)\n else:\n try:\n ret_val = map(lambda kw: d[kw], ks)\n except KeyError:\n t, v, tb = sys.exc_info()\n msg = \"{} not found in {}\".format(v, d.keys())\n raise t, msg, tb\n return ret_val",
"def setdefault(self, key, default=None):\n if key in self:\n return self[key]\n else:\n self[key] = default\n return default",
"def get(dict, var, default=None):\n\n if dict.has_key(var):\n return dict[var]\n else:\n return default",
"def lookup(self, key, default=None):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n if not bucket:\n return default\n for key_val_pair in bucket:\n k, v = key_val_pair\n if k == key:\n return v",
"def get(self, key, default=None):\n try:\n return self.context.get(self.prefix+'.'+key, default)\n except AttributeError:\n return default",
"async def _iterate_dict(self, d, key_path, default):\n key, _iter = KeyPathsIter([key_path]).__next__()\n while _iter is not None:\n if key not in d:\n return default\n d = d[key]\n key, _iter = _iter.__next__()\n if key not in d:\n return default\n return d[key]",
"def get_value(key, obj, default=missing):\n if isinstance(key, int):\n return _get_value_for_key(key, obj, default)\n return _get_value_for_keys(key.split('.'), obj, default)",
"def safedictkey(dictname, keyname, default='None'):\r\n try:\r\n dictname_temp = dictname\r\n for value in keyname:\r\n dictname_temp = d = dictname_temp[value]\r\n return d\r\n except Exception:\r\n return default",
"def setdefault(self, key, default=0):\n val = self._lookup(key).value\n if val is None:\n self[key] = default\n return default\n return val",
"def get(self, key, default_val=None):\n if key not in self._config.keys(): # we don't want KeyError\n return default_val # just return None if not found\n return self._config[key]",
"def setdefault(self, key, default=None):\r\n try:\r\n return self[key]\r\n except KeyError:\r\n self[key] = default\r\n return default",
"def lookup(self, s, default=None):\n\t\thead, tail = s[0], s[1:]\n\t\tnode = self.root[head]\n\t\tif tail:\n\t\t\treturn node.lookup(tail)\n\t\treturn node.value or default",
"def get(self, keys, default=None):\n # We implement get manually since __getitem__ will always return a value\n # for a properly formatted key. This explicit method uses the provided\n # default with the benefit that __getitem__ can be defined in terms of\n # get.\n value = {}\n for key in self._yield_keys(keys):\n try:\n value[key] = self._single_getitem(key)\n except KeyError:\n value[key] = default\n return _proper_type_return(value)",
"def get_item(self, key, default_value):\n # TODO: Add self.prefix\n return self.table.getValue(key, default_value)",
"def get_from_dict(d, k):\n try:\n return reduce(dict.get, k, d)\n except TypeError:\n # Value not found.\n return None",
"def get(cls, key: t.Any, default: t.Optional[t.Any] = None) -> t.Any:\n return cls.__labels__.get(key, default)",
"def get(ind, seq, default=no_default):\n if isinstance(ind, list):\n return tuple(get(i, seq, default) for i in ind)\n if default is no_default:\n return seq[ind]\n else:\n try:\n return seq[ind]\n except (KeyError, IndexError):\n return default",
"def get(self, key, default=None):\n return self._data.get(key, default)",
"def get(aMap,key,default=None):\n\ti,k,v=get_slot(aMap,key,default=default)",
"def setdefault(self, key: str, default: Any = None) -> Any:\n try:\n return self[key]\n except KeyError:\n self[key] = default\n return self[key]",
"def getdict(self, section, option, default=None):\r\n return self.get(section, option, type=dict, default=default)",
"def test_find_default(self):\n mute_map = MutableMap(**VALUE)\n\n assert mute_map.find('NOT_VALID', 'default_val') == \\\n 'default_val', 'default should be used'\n assert mute_map.find('str_val', 'default_val') == \\\n VALUE['str_val'], 'default should be ignored'",
"def get_value(key, default_value=None):\r\n # Get value from project configuration\r\n value = get_project_value(key)\r\n # Use package configuration when value has not been found\r\n if value is None:\r\n value = get_package_value(key)\r\n # Return package/project value\r\n if value is not None:\r\n return value\r\n # Otherwise use default value\r\n return default_value",
"def _get_simple_default_value(simple):\n return _SIMPLE_DEFAULT_VALUES[simple]",
"def _getatt(attrs, key, ns=None, default=None):\n keytuple = (ns, unicode(key))\n if attrs.has_key(keytuple):\n result = attrs[keytuple]\n else:\n result = default\n return result",
"def get(self, key, default=None):\n return self.data.get(key, default)",
"def get(self, key, default=None):\n return self.data.get(key, default)",
"def get(self, key, default=None, namespace=None):\n if namespace:\n key = self.add_namespace(key, namespace)\n \n value = self._store.get(key)\n if default != None and value is None:\n value = default\n return value",
"def get(self, key, default=None):\n raise NotImplementedError()",
"def get(self, key, default=None):\n key = self._validate_key(key)\n sql = u\"\"\"\n SELECT `value` FROM `{table}` WHERE key = ?\n \"\"\".format(table=self.name)\n\n r = self.conn.execute(sql, (key,)).fetchone()\n\n if r:\n return self.convert_out(r['value'])\n\n return default"
] | [
"0.66533864",
"0.6498185",
"0.63302475",
"0.6233764",
"0.62260294",
"0.6200173",
"0.6188591",
"0.6064579",
"0.604745",
"0.6032425",
"0.6025601",
"0.60055006",
"0.6003231",
"0.59860516",
"0.59845686",
"0.5940937",
"0.59354126",
"0.5911613",
"0.5899532",
"0.5888177",
"0.5871198",
"0.58551186",
"0.5841778",
"0.5826931",
"0.5815506",
"0.57729095",
"0.5767121",
"0.5760459",
"0.57500494",
"0.5739984",
"0.5731379",
"0.5723014",
"0.5716019",
"0.57082325",
"0.5696898",
"0.5695489",
"0.5682254",
"0.56801486",
"0.5666924",
"0.5657535",
"0.56491613",
"0.5647637",
"0.5633998",
"0.5620934",
"0.56012726",
"0.5591707",
"0.5585011",
"0.5567217",
"0.55590147",
"0.5556519",
"0.5552755",
"0.55383295",
"0.55158836",
"0.55039394",
"0.54999393",
"0.5489717",
"0.5485378",
"0.54852647",
"0.54649585",
"0.54598725",
"0.5453248",
"0.5452772",
"0.54429334",
"0.53976244",
"0.5393728",
"0.53742725",
"0.5369416",
"0.5369416",
"0.5355881",
"0.5340674",
"0.5340037",
"0.5333459",
"0.5331762",
"0.53314734",
"0.52985734",
"0.5297004",
"0.5291025",
"0.52849287",
"0.5269272",
"0.526818",
"0.526247",
"0.5244381",
"0.52426034",
"0.5239099",
"0.5226266",
"0.52257055",
"0.52155954",
"0.518945",
"0.5184409",
"0.51827",
"0.517955",
"0.5174221",
"0.51728725",
"0.5161603",
"0.51593894",
"0.5158667",
"0.5158667",
"0.51506746",
"0.5146109",
"0.51458204"
] | 0.7002469 | 0 |
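A minimal usage sketch of the `get_in` positive above (illustrative only; the `config` dictionary and key paths are invented for the example):

config = {"db": {"primary": {"host": "localhost", "port": 5432}}}

assert get_in(config, ["db", "primary", "port"]) == 5432
# missing intermediate key: falls back to None
assert get_in(config, ["db", "replica", "port"]) is None
# missing key with an explicit default
assert get_in(config, ["db", "replica", "port"], default=5433) == 5433
# walking through a non-dict value also yields the default
assert get_in(config, ["db", "primary", "host", "scheme"], default="n/a") == "n/a"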
Associates a value in a nested associative structure, where `ks` is a sequence of keys and `v` is the new value, and returns the updated nested structure. If any intermediate levels do not exist, `dict`s will be created. | def assoc_in(d, ks, v):
    *ks_, last = ks
    d_ = d
    for k in ks_:
        if k not in d_:
            d_[k] = {}
        d_ = d_[k]
    d_[last] = v
    return d | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nested_set(d: t.Dict, keys: t.Sequence[str], value: t.Any) -> t.Dict:\n if not keys:\n return d\n\n if len(keys) == 1:\n d[keys[0]] = value\n return d\n\n subd = d\n for key in keys[:-1]:\n if key not in subd:\n subd = subd.setdefault(key, {})\n else:\n subd = subd[key]\n\n subd[keys[-1]] = value\n return d",
"def insert_to_dict(dc: dict, k: str, v: dict):\n if k not in dc:\n dc[k] = list()\n dc[k].append(v)",
"def deep_update(d, u):\n for k, v in six.iteritems(u):\n dv = d.get(k, {})\n if not isinstance(dv, collections.abc.Mapping):\n d[k] = v\n elif isinstance(v, collections.abc.Mapping):\n d[k] = deep_update(dv, v)\n else:\n d[k] = v\n return d",
"def add_or_update_key_in_dict(dictionary, keys_list, level=-1, value=None):\n dictionary = deepcopy(dictionary) # make a copy of the dictionary to avoid changing the state of the original dictionary\n\n is_terminal_key = False\n\n if level == len(keys_list) - 1 or level == -1:\n is_terminal_key = True\n\n if not is_terminal_key and value:\n raise ValueError('Value cannot be set on non terminal keys')\n\n '''get the reference to the dictionary that holds the key within the nesting'''\n current_location = get_object_in_dict(dictionary, keys_list[:level])\n key_index_info = extract_key_and_index(keys_list[level])\n parent_key_info = extract_key_and_index(keys_list[0])\n\n key_type = key_index_info[0]\n key_name = key_index_info[1]\n\n if key_type == 'key':\n if is_terminal_key:\n current_location[key_name] = value\n if parent_key_info[0] == 'simple_list' or parent_key_info[0] == 'compound_simple_list':\n current_location[key_name] = value.split(';') if value else []\n else:\n '''if key is not a terminal key then it must be a dictionary'''\n current_location[key_name] = {}\n elif key_type == 'simple_list':\n if level == 0 or level == -1:\n if is_terminal_key:\n current_location[key_name] = value.split(';') if value else []\n else:\n current_location[key_name] = {}\n else:\n raise KeyError('* should be on the top most key.')\n\n elif key_type == 'compound_list' or key_type == 'compound_simple_list':\n key_index = int(key_index_info[2])\n\n '''if the dictionary doesn't contain the key then initialize it'''\n if key_name not in current_location:\n current_location[key_name] = []\n\n curr_list = current_location[key_name]\n if key_index < len(curr_list):\n current_location[key_name][level] = value if is_terminal_key else {}\n else:\n current_list = current_location[key_name]\n '''if the index exceeds the size of compound list then expand the list.'''\n for index in range(len(current_list), key_index):\n current_list.append(None if is_terminal_key else {})\n current_list.append(value if is_terminal_key else {})\n\n return dictionary",
"def createDict(given_dict, words, value):\n\tresult_dict = given_dict\n\t# base case: if list is empty, add the value to the dict\n\tif not words:\n\t\tif '$value' in result_dict:\n\t\t\tresult_dict['$value'].append(value)\n\t\telse:\n\t\t\tresult_dict['$value'] = [value]\n\telse:\n\t\t# if the first word is already in dict, traverse through treemap with that word\n\t\t# call createDict with the tail of the words list\n\t\tif words[0] in result_dict:\n\t\t\tresult_dict[words[0]] = createDict(result_dict[words[0]], words[1:], value)\n\t\telse:\n\t\t\t# if the first word is not in the dict, create a new path\n\t\t\t# call createDict with the tail of the words list\n\t\t\tresult_dict[words[0]] = createDict({}, words[1:], value)\n\n\treturn result_dict",
"def _set_by_path(dic, keys, value, create_missing=True):\n d = dic\n i = 0\n n_key = len(keys) - 1\n while i < n_key:\n k = keys[i]\n if isinstance(k, int):\n assert isinstance(d, list), \"Internal Error: %s is Expected as a list for %s.\" % (d, k)\n\n while len(d) <= k:\n d.insert(k, {})\n d = d[k]\n elif k in d:\n d = d[k]\n elif create_missing:\n next_key = keys[i + 1]\n if isinstance(next_key, int):\n if isinstance(d, list):\n d.insert(k, [])\n else:\n d[k] = []\n else:\n d[k] = {}\n d = d[k]\n else:\n return dic\n i += 1\n\n if isinstance(d, list) and keys[-1] >= len(d):\n d.insert(keys[-1], value)\n else:\n d[keys[-1]] = value\n return dic",
"def nested_set(data, keys, value):\n for key in keys[:-1]:\n data = data.setdefault(key, {})\n data[keys[-1]] = value",
"def recursive_mapping_update(d, u):\n if u is not None:\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n r = recursive_mapping_update(d.get(k, {}), v)\n d[k] = r\n else:\n d[k] = u[k]\n return d",
"def recursive_update_cfg(d, u):\n for k, v in u.iteritems():\n if isinstance(v, collections.Mapping):\n r = update(d.get(k, {}), v)\n d[k] = r\n else:\n d[k] = u[k]\n return d",
"def _update_dict(full_key, val, d):\n for vk, vv in val.items():\n # The key of value is not in d.\n # if vk not in d:\n # # Exit.\n # raise ValueError(\"{}.{} does not exist in options\".format(full_key, vk))\n # else: # The key of val is in d.\n if isinstance(vv, list): # The value of the key is list.\n d[vk] = np.array(vv) # Store it as a numpy array.\n elif isinstance(vv, dict): # The value of the key is dictionary.\n _update_dict(full_key + \".\" + vk, vv, d[vk]) # Call the function again.\n else: # At the leaf of the dictionary.\n d[vk] = vv",
"def flatten(d: MutableMapping, sep: str = \".\", parent_key: str = \"\") -> dict:\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def update(d, u):\n\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n d[k] = update(d.get(k, {}), v)\n else:\n d[k] = v\n return d",
"def update(d, u):\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n d[k] = update(d.get(k, {}), v)\n else:\n d[k] = v\n return d",
"def nested_set(dictionary: dict, keys: list, value):\n nested_dict = dictionary\n for key in keys[:-1]:\n nested_dict = nested_dict[key]\n nested_dict[keys[-1]] = value\n return dictionary",
"def dict_deep_update(d, u, handlers=None):\n if handlers is None:\n handlers = {}\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n r = dict_deep_update(d.get(k, {}), v, handlers)\n d[k] = r\n elif k in d:\n h = handlers.get(type(v), None)\n if h is not None:\n d[k] = h(d[k], u[k])\n else:\n d[k] = u[k]\n else:\n d[k] = u[k]\n return d",
"def nest_dict(dct, keys):\n nested_dict = dct\n for key in reversed(keys):\n nested_dict = RecursiveDict({key: nested_dict})\n return nested_dict",
"def nested_dict():\n return defaultdict(nested_dict)",
"def create_level(dict, path_list, value):\n\tif len(path_list) == 0:\n\t\treturn\n\n\tfor k in path_list[:-1]:\n\t\tdict = dict[k]\n\t\n\tdict[path_list[-1]] = value",
"def add_or_append_dict_entry(main_dict, main_key, sub_key, value):\n # type: (dict, str, str, Any) -> dict\n if main_key not in main_dict:\n main_dict[main_key] = dict()\n if sub_key not in main_dict[main_key]:\n main_dict[main_key][sub_key] = [value]\n else:\n main_dict[main_key][sub_key].append(value)\n return main_dict",
"def set_nested_item(data_dict: dict, key_list: tuple or list, value):\r\n reduce(getitem, key_list[:-1], data_dict)[key_list[-1]] = value\r\n return data_dict",
"def deep_merge(d, u):\n stack = [(d, u)]\n while stack:\n d, u = stack.pop(0)\n for k, v in u.items():\n if not isinstance(v, collections.abc.Mapping):\n d[k] = v\n else:\n dv = d.setdefault(k, {})\n if not isinstance(dv, collections.abc.Mapping):\n d[k] = v\n else:\n stack.append((dv, v))",
"def update(d, u):\n for k, v in u.items():\n if isinstance(v, Mapping):\n d[k] = update(d.get(k, {}), v)\n else:\n d[k] = v\n return d",
"def flatten_dict(\n d, parent_key=\"\", sep=\".\", ignore_under_prefixed=True, mark_value=True\n):\n items = {}\n for k in d:\n if ignore_under_prefixed and k.startswith(\"__\"):\n continue\n v = d[k]\n if mark_value and k.startswith(\"_\") and not k.startswith(\"__\"):\n v = MarkValue(repr(v))\n\n new_key = sep.join((parent_key, k)) if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.update(\n flatten_dict(\n v, new_key, sep=sep, ignore_under_prefixed=True, mark_value=True\n )\n )\n else:\n items[new_key] = v\n\n return items",
"def build_dct(dic, keys, value):\n key = keys.pop(0)\n if len(keys):\n dic.setdefault(key, {})\n build_dct(dic[key], keys, value)\n else:\n # Transform cookbook default attribute strings into proper booleans\n if value == \"false\":\n value = False\n elif value == \"true\":\n value = True\n # It's a leaf, assign value\n dic[key] = value",
"def extend(d, k, v):\n\tn = d.copy()\n\tn[k] = v\n\treturn n",
"def FlattenDictionary(value, keys=[]):\n result = {}\n if type(value) is dict:\n for key in value:\n result.update(FlattenDictionary(value[key], keys + [key]))\n return result\n else:\n key = '.'.join(keys)\n return {key: value}",
"def flatten_dict(d, parent_key=\"\", sep=\"_\"):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten_dict(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def _add_val_to_dict(self, d, k, v):\n if k in list(d):\n d[k] += v\n else:\n d[k] = [v]\n return d",
"def flatten_dict(d, sep=' ', parent_key=''):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten_dict(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def create_recursive_dot_dict(data: Dict[str, Any], cls=DotDict) -> Union[DotDict, DotDefaultDict]:\n res = cls()\n for k, v in data.items():\n k = k.split(\".\")\n target = res\n for i in range(0, len(k)-1):\n t2 = target.get(k[i])\n if t2 is None:\n t2 = cls()\n target[k[i]] = t2\n\n assert isinstance(t2, cls), f\"Trying to overwrite key {'.'.join(k[:i+1])}\"\n target = t2\n\n assert isinstance(target, cls), f\"Trying to overwrite key {'.'.join(k)}\"\n target[k[-1]] = v\n return res",
"def insert_into_dictionary(dictionary: Dict, key_sequence: List[str], value):\n parent = dictionary\n for key in key_sequence[:-1]:\n if key not in parent.keys():\n parent[key] = {}\n parent = parent[key]\n parent[key_sequence[-1]] = value",
"def set(cls, hierarchical_dict: dict, key: str, value: Any) -> None:\n # split according to '.'\n hierarchical_key = key.split(\".\")\n\n # go over the the dictionary according to the path, create the nodes that does not exist\n element = hierarchical_dict\n for key in hierarchical_key[:-1]:\n if key not in element:\n element[key] = {}\n element = element[key]\n\n # set the value\n element[hierarchical_key[-1]] = value",
"def flatten(d, parent_key='', sep='_'):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def __setitem__(self, k, v):\n #if tree is empty\n if self.is_empty():\n # inherited from LinkedBinaryTree class\n # _Item(k, v) is inheritated from MapBase class\n leaf = self._add_root(self._Item(k,v)) \n else:\n p = self._subtree_search(self.root(), k)\n #if k is present in current tree\n if p.key() == k:\n #it's not p.value()!!\n p.element()._value = v\n self._rebalance_access(p)\n return\n #didn't find k in current tree; create a new object of Item\n # and add to either left or right of the last node searched\n else:\n item = self._Item(k, v)\n if k > p.key():\n leaf = self._add_right(p, item)\n else:\n leaf = self._add_left(p, item)\n self._rebalance_insert(leaf)",
"def transform_key_to_nested_dict(k: str, v, nested_fields: set, list_fields: set, classification_code: list, new: dict):\n # Get all (nested) fields of a specific key\n fields = k.split(\".\")\n tmp = new\n # Create one dictionary for each field\n for key in fields:\n key = convert(key)\n try:\n tmp[key]\n except KeyError:\n # Add the value to the most nested level\n if key == fields[-1]:\n tmp[key] = transform_dict(v, convert, nested_fields, list_fields)\n if key == \"classification\":\n # Add classification code column\n tmp[\"classification_code\"] = transform_dict(\n classification_code, convert, nested_fields, list_fields\n )\n # Create empty dictionary for key\n else:\n tmp[key] = {}\n tmp = tmp[key]",
"def convert_dotKeyToNestedDict(self, tree, key, value):\n\n t = tree\n if \".\" in key:\n key, rest = key.split(\".\", 1)\n if key not in tree:\n t[key] = {}\n self.convert_dotKeyToNestedDict(t[key], rest, value)\n else:\n t[key] = value\n\n return t",
"def replace_key_value(lookup, new_value, expected_dict):\n for key, value in expected_dict.items():\n if lookup == key:\n if isinstance(value, dict) and isinstance(new_value, dict):\n value.update(new_value)\n else:\n expected_dict[key] = new_value\n elif isinstance(value, dict):\n expected_dict[key] = replace_key_value(lookup, new_value, value)\n return expected_dict",
"def deep_merge(origin: dict, renovator: Mapping) -> dict:\n\n for key, value in renovator.items():\n if isinstance(value, Mapping):\n node = origin.setdefault(key, {})\n deep_merge(node, value)\n else:\n origin[key] = value\n\n return origin",
"def flatten_dict(d, separator=':', _parent_key=''):\n items = []\n for k, v in d.items():\n new_key = _parent_key + separator + k if _parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten_dict(v, separator=separator, _parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def update_dct(dic1, dic2):\n for key, val in dic2.items():\n if isinstance(val, dict):\n dic1.setdefault(key, {})\n update_dct(dic1[key], val)\n else:\n dic1[key] = val",
"def update_dict(d, u, omit_new=False):\n\n for k, v in u.items():\n if k not in d and omit_new:\n continue\n\n if isinstance(v, collections.abc.Mapping):\n d[k] = update_dict(d.get(k, {}), v, omit_new)\n elif isinstance(v, list):\n d[k] = [update_dict(i, j, omit_new) if None not in (i, j) else\n i if j is None else j\n for (i, j) in itertools.zip_longest(d.get(k, []), v)]\n else:\n d[k] = v\n return d",
"def update_double_dict(outer, inner):\n for k, v in outer.items():\n outer[k].update(inner[k])",
"def mapper(d, keymap):\n updated_dict = {}\n for k, v in zip(d.keys(), d.values()):\n new_k = keymap.get(k, k)\n updated_dict[new_k] = d[k]\n return updated_dict",
"def mapper(d, keymap):\n\n updated_dict = {}\n for k, v in zip(d.keys(), d.values()):\n new_k = keymap.get(k, k)\n updated_dict[new_k] = d[k]\n return updated_dict",
"def update_dict(dictionary, key, value):\n if key in dictionary:\n dictionary[key].append(value)\n else:\n dictionary[key] = [value]\n return dictionary",
"def _add_item(dic: dict, keys: list, value):\n\tfor key in keys[:-1]:\n\t\tdic = dic.setdefault(key, {})\n\n\tdic[keys[-1]] = value",
"def setValue(dictionary, key, default_value):\n value = getValue(dictionary, key, default_value)\n new_dict = dict(dictionary)\n new_dict[key] = value\n return new_dict",
"def _get_nested_dict(dictionary, key, nested_config=None):\n if key not in dictionary:\n nested = {}\n if nested_config:\n _fill_zero_counters_dict(nested_config, nested)\n dictionary[key] = nested\n return nested\n return dictionary[key]",
"def dictionary_map(mapping):\n dictionary = dict()\n\n def add_item_to_dictionary(path, k, v):\n dictionary.update(\n dict_merge(\n dictionary,\n inflate_key_value_pair(\n join(path, k), v\n )\n )\n )\n\n loop_dictionary(\n mapping,\n callback=add_item_to_dictionary\n )\n\n return dictionary",
"def deepupdate(original, update):\n for key, value in original.iteritems():\n if not key in update:\n update[key] = value\n elif isinstance(value, dict):\n deepupdate(value, update[key])\n return update",
"def set_upward(self, key, value):\n context = self.dicts[-1]\n for d in reversed(self.dicts):\n if key in d:\n context = d\n break\n context[key] = value",
"def set_key(self, key, value):\n if '.' in key:\n key, remainder = key.split('.', 1)\n try:\n self[key].set_key(remainder, value)\n except KeyError:\n self[key] = AttrDict()\n self[key].set_key(remainder, value)\n except AttributeError:\n if self[key] is None: # If the value is None, we replace it\n self[key] = AttrDict()\n self[key].set_key(remainder, value)\n # Else there is probably something there, and we don't just\n # want to overwrite so stop and warn the user\n else:\n raise KeyError('Cannot set nested key on non-dict key.')\n else:\n self[key] = value",
"def merge_dict(a: dict, b: dict, path=None) -> dict:\n if path is None:\n path = []\n for key in b:\n if key in a:\n if isinstance(a[key], dict) and isinstance(b[key], dict):\n merge_dict(a[key], b[key], path + [str(key)])\n elif a[key] == b[key]:\n pass # same leaf value\n else:\n a[key] = b[key]\n else:\n a[key] = b[key]\n return a",
"def test_add_nested_dict_value():\n\n recursiveKeys = [\n \"reward_signals.extrinsic.strength\",\n \"reward_signals.extrinsic.gamma\",\n \"reward_signals.curiosity.strength\",\n \"reward_signals.curiosity.gamma\",\n ]\n\n expectedDict = {\n 'reward_signals': {\n 'curiosity': {'gamma': 1.0, 'strength': 1.0},\n 'extrinsic': {'gamma': 1.0, 'strength': 1.0},\n }\n }\n\n dictionary = {}\n\n for key in recursiveKeys:\n common.add_nested_dict_value(dictionary, key, 1.0)\n\n assert dictionary == expectedDict\n\n dictionary = {'reward_signals': {'extrinsic': {}}}\n\n for key in recursiveKeys:\n common.add_nested_dict_value(dictionary, key, 1.0)\n\n assert dictionary == expectedDict\n\n dictionary = {'reward_signals': {'extrinsic': {'gamma': 0.99}}}\n\n for key in recursiveKeys:\n common.add_nested_dict_value(dictionary, key, 1.0)\n\n assert dictionary == expectedDict",
"def merge_dict_recursive(target, src):\r\n for k in src.keys():\r\n if ((k in target and isinstance(target[k], dict) and\r\n isinstance(src[k], collections.Mapping))):\r\n merge_dict_recursive(target[k], src[k])\r\n else:\r\n target[k] = src[k]",
"def update_nested_dict(old_dict, new_dict, extend_list_values=False):\n for k, v in new_dict.items():\n if k in old_dict.keys():\n if isinstance(v, dict) and isinstance(old_dict[k], dict):\n old_dict[k] = update_nested_dict(\n old_dict[k], v, extend_list_values=extend_list_values\n )\n elif (\n extend_list_values\n and isinstance(old_dict[k], list)\n and isinstance(v, list)\n ):\n old_dict[k].extend(v)\n elif v:\n old_dict[k] = v\n else:\n old_dict[k] = v\n return old_dict",
"def add(self, k, v):\n values = super(OrderedMultiDict, self).setdefault(k, [])\n self._insert(k, v)\n values.append(v)",
"def add_to_dictionary(dictionary: dict, key: str, value) -> dict:\n dictionary[key] = value\n return dictionary",
"def _put(self, k, v, currNode):\n if k < currNode.key:\n if currNode.hasLeftChild():\n self._put(k, v, currNode.leftChild)\n else:\n currNode.leftChild = TreeNode(k, v, parent=currNode)\n\n elif k > currNode.key:\n if currNode.hasRightChild():\n self._put(k, v, currNode.rightChild)\n else:\n currNode.rightChild = TreeNode(k, v, parent=currNode)\n\n else:\n currNode.payload = v\n self.size -= 1",
"def _update(value: Dict[str, Any], update: Dict[str, Any]):\n for key, val in update.items():\n\n if key not in value:\n value[key] = val\n elif isinstance(val, dict):\n value[key] = _update(value[key], val)\n else:\n value[key] = val\n return value",
"def _append_or_create(dict_, key_, value):\n if key_ not in dict_:\n dict_[key_] = [value]\n else:\n assert isinstance(dict_[key_], list)\n dict_[key_].append(value)\n return dict_",
"def flatten_dict(in_obj: Dict[Any, Any], *, sep: str = '_', key_maker: Callable = None) -> Dict[str, Any]:\n\n if key_maker is None:\n key_maker = sep.join\n out_dict = {}\n for key, obj in in_obj.items():\n\n try:\n\n for inner_key, value in obj.items():\n try:\n\n out_dict.update(\n flatten_dict(\n {(key, inner_key): value},\n sep=sep,\n key_maker=key_maker\n )\n )\n\n except AttributeError:\n out_dict[key_maker(flatten_keys([key, inner_key]))] = value\n\n except AttributeError:\n out_dict[key_maker(flatten_keys(key))] = obj\n\n return out_dict",
"def depth_wrap(self, value):\n return DictToObj(**value) if isinstance(value, dict) else value",
"def merge(source, destination):\n for key, value in source.items():\n if isinstance(value, dict):\n # get node or create one\n node = destination.setdefault(key, {})\n merge(value, node)\n else:\n destination[key] = value\n\n return destination",
"def __setitem__(self, key, value):\n if isinstance(key, list):\n value = _ensure_len(len(key), value)\n for k, v in zip(key, value):\n defaultdict.__setitem__(self, k, v)\n else:\n defaultdict.__setitem__(self, key, value)\n return self",
"def map_leaf_values(f, value):\n if isinstance(value, list):\n return map(lambda x: map_leaf_values(f, x), value)\n elif isinstance(value, dict):\n ret = {}\n for (dict_key, dict_value) in value.items():\n key = map_leaf_values(f, dict_key)\n value = map_leaf_values(f, dict_value)\n ret[key] = value\n return ret\n else:\n return f(value)",
"def test_deep_set_create(self):\n mdict = copy.deepcopy(self.dict1)\n res = dictupdate.set_dict_key_value(mdict, \"K:L:M\", \"Q\")\n self.assertEqual(\n {\n \"A\": \"B\",\n \"C\": {\"D\": \"E\", \"F\": {\"G\": \"H\", \"I\": \"J\"}},\n \"K\": {\"L\": {\"M\": \"Q\"}},\n },\n res,\n )",
"def make_recursive(obj):\n if isinstance(obj, list):\n for i, l in enumerate(obj):\n obj[i] = AttrDict.make_recursive(l)\n elif isinstance(obj, dict):\n for k, v in obj.items():\n obj[k] = AttrDict.make_recursive(v)\n return AttrDict(obj)\n return obj",
"def _put(self, key: str, value):\n current_storage_dict = self._storage\n sub_keys = key.split('.')\n i = 1\n length = len(sub_keys)\n for sub_key in sub_keys:\n if i < length:\n if sub_key not in current_storage_dict:\n current_storage_dict[sub_key] = dict()\n current_storage_dict = current_storage_dict[sub_key]\n elif sub_key in current_storage_dict and isinstance(current_storage_dict[sub_key], dict):\n current_storage_dict = current_storage_dict[sub_key]\n else:\n raise TypeError('Cannot overwrite key {}'.format(key))\n\n else:\n current_storage_dict[sub_key] = value\n\n i += 1",
"def pdict(self, *args, **kwargs):\n if self is self.__root__:\n try:\n if self.all(lambda x: len(x) == 2):\n return pdict({k: v for k, v in self}).update(*args, **kwargs)\n except Exception:\n pass\n try:\n return pdict({k: v for k, v in zip(self.__root__.key, self)}).update(*args, **kwargs)\n except Exception:\n pass\n return pdict({k: v for k, v in zip(self.__root__, self)}).update(*args, **kwargs)",
"def construct_kv_dict(self):\r\n key1 = self.key_factory('existing_field')\r\n key2 = self.key_factory('other_existing_field')\r\n new_value = 'new value'\r\n newer_value = 'newer value'\r\n return {key1: new_value, key2: newer_value}",
"def append_by_dot_path(dictionary: Dict, key_path: str, value: Any) -> Dict:\n try:\n get_by_dot_path(dictionary, key_path).append(value)\n except KeyError:\n add_by_dot_path(dictionary, key_path, [value])\n return dictionary",
"def update_dict(target, *updates):\r\n for update in updates:\r\n for key, val in list(update.items()):\r\n if isinstance(val, collections.Mapping):\r\n target[key] = update_dict(target.get(key, {}), val)\r\n else:\r\n target[key] = update[key]\r\n return target",
"def sub_dict(d):\n r = {}\n for k in d:\n if type(d[k]) in prims:\n r[k] = d[k]\n elif type(d[k]) is list:\n r[k] = sub_list(d[k])\n elif type(d[k]) is dict:\n r[k] = sub_dict(d[k])\n else:\n print \"Unknown Type: {}\".format(type(d[k]))\n return r",
"def flatten(d: Union[dict, list], parent_key: str = \"\", sep: str = \".\") -> dict:\n items = []\n if isinstance(d, dict):\n for k, v in d.items():\n new_key = f\"{parent_key}{sep}{k}\" if parent_key else str(k)\n items.extend(flatten(v, new_key, sep=sep).items())\n elif isinstance(d, list):\n for i, elem in enumerate(d):\n new_key = f\"{parent_key}{sep}{i}\" if parent_key else str(i)\n items.extend(flatten(elem, new_key, sep).items())\n else:\n items.append((parent_key, d))\n return dict(items)",
"def recursive_update(d1, d2):\n\n for key, value in d2.items():\n if key in d1 and isinstance(d1[key], dict) and isinstance(value, dict):\n recursive_update(d1[key], value)\n else:\n d1[key] = value",
"def deep_update(source, overrides):\n for key, value in overrides.iteritems():\n if isinstance(value, collections.Mapping) and value:\n returned = deep_update(source.get(key, {}), value)\n source[key] = returned\n else:\n source[key] = overrides[key]\n return source",
"def addtwodimdict(self, thedict, key_a, key_b, val):\r\n if key_a in thedict:\r\n thedict[key_a].update({key_b: val})\r\n else:\r\n thedict.update({key_a: {key_b: val}})",
"def assign(self, key, value):\n key_split = key.split('.')\n cur_dict = self\n for k in key_split[:-1]:\n try:\n cur_dict = cur_dict[k]\n except KeyError:\n cur_dict[k] = self.__class__() # so that derived classes\n # remain true to type\n cur_dict = cur_dict[k]\n cur_dict[key_split[-1]] = value",
"def upsert(self, k, v):\n if k in self:\n self[k] = self[k].union(v)\n else:\n self[k] = v",
"def unflatten(\n d: Dict[str, Any],\n base: Dict[str, Any] = None,\n) -> Dict[str, Any]:\n if base is None:\n base = {}\n\n for key, value in d.items():\n root = base\n\n ###\n # If a dotted path is encountered, create nested dicts for all but\n # the last level, then change root to that last level, and key to\n # the final key in the path. This allows one final setitem at the bottom\n # of the loop.\n if '.' in key:\n *parts, key = key.split('.')\n\n for part in parts:\n root.setdefault(part, {})\n root = root[part]\n\n if isinstance(value, dict):\n value = unflatten(value, root.get(key, {}))\n\n root[key] = value\n\n return base",
"def overwrite_dict(dict_base, dict_new, base_path=None):\n assert isinstance(dict_new, dict)\n for k in dict_new:\n # Add the current key to the path\n k_path = str(k) if base_path is None else f'{base_path}.{str(k)}'\n # Make sure that the key in the new dictionary matches one from the base dictionary\n assert k in dict_base, f'Could not find path {k_path} in the base dictionary'\n # Check that the types match between the base dictionary entry and the new one\n if dict_base[k] is not None:\n assert isinstance(type(dict_base[k]), type(dict_new[k])), \\\n 'The types at {} in the base dictionary do not match (expected {}, got {})'.format(\n k_path, str(type(dict_base[k])), str(type(dict_new[k])))\n # Recursively replace dictionary entries\n if isinstance(dict_base[k], dict):\n overwrite_dict(dict_base[k], dict_new[k], k_path)\n else:\n # Simply copy over leaf entries\n dict_base[k] = dict_new[k]",
"def put(self, k: Any, v: Any):\n i = abs(hash(k)) % self.size\n current = self.data[i]\n while current is not None:\n if current.key == k:\n current.value = v\n return\n current = current.next\n new_node = self.Node(k, v)\n new_node.next = self.data[i]\n self.data[i] = new_node",
"def construct_kv_dict(self):\r\n key1 = user_state_key('field_a')\r\n key2 = user_state_key('field_b')\r\n new_value = 'new value'\r\n newer_value = 'newer value'\r\n return {key1: new_value, key2: newer_value}",
"def flatten(d):\n\n c = {}\n\n def _flatten(parents, items):\n for k, v in items:\n cur = parents + [k]\n if isinstance(v, list):\n _flatten(cur, enumerate(v))\n elif isinstance(v, dict):\n _flatten(cur, v.items())\n else:\n if v is None:\n cur.append('$NULL')\n v = ''\n name = str(cur[0]) + ''.join(['['+str(x)+']' for x in cur[1:]])\n c[name] = v\n \n _flatten([], d.items())\n\n return c",
"def deep_update(source: dict, overrides: Mapping):\n\n for key, value in overrides.items():\n if isinstance(value, Mapping) and value:\n returned = deep_update(source.get(key, {}), value)\n source[key] = returned\n else:\n source[key] = overrides[key]\n\n return source",
"def flatten_dict(nested, prefix=''):\n result = dict()\n\n for key, value in nested.items():\n prefix_key = '__'.join([prefix, str(key)]) if len(prefix) else key\n\n if key in IGNORED_DICT_KEYS and not isinstance(value, (dict, list)):\n continue\n\n elif isinstance(value, dict):\n result.update(flatten_dict(value, prefix_key))\n\n elif isinstance(value, (np.ndarray, list)):\n result.update(flatten_array(value, prefix_key))\n\n else:\n result[prefix_key] = value\n\n return result",
"def update_dict(original_dict, new_dict):\n if new_dict == None: return original_dict\n for k in new_dict:\n if k not in original_dict:\n original_dict[k] = []\n original_dict[k].append(new_dict[k])\n else: original_dict[k].append(new_dict[k])\n return original_dict",
"def map_dictionary(dictionary):\n mapping = dict()\n\n def add_item_to_mapping(path, k, v):\n mapping.update({join(path, k): v})\n\n loop_dictionary(\n dictionary,\n callback=add_item_to_mapping\n )\n return mapping",
"def pivot_nested_dict(nested_dict):\r\n\r\n reverse_nest_dict = {} #Create an empty dictionary\r\n for k, v in nested_dict.items(): #Iterate through each pair of elements\r\n for k2, v2 in v.items(): #Iterate through pair of values\r\n try:\r\n reverse_nest_dict[k2][k] = v2\r\n except KeyError:\r\n reverse_nest_dict[k2] = { k : v2 }\r\n return reverse_nest_dict\r\n \r\n #Create a dictionary that produces a different nested dictionary which\r\n #contains the same values\r",
"def update(sn, d):\n if isinstance(sn, SN):\n sn = vars(sn)\n d = unwind_nested_dict(decode(d))\n for k, v in d.items():\n if k in sn and isinstance(v, (dict, SN)) and isinstance(sn[k], (dict, SN)):\n update(sn[k], v)\n else:\n sn[k] = encode(v)",
"def init_from_dict(self, d):\n for k, v in d.items():\n # First, keys must be strings, not ints\n if isinstance(k, int):\n k = str(k)\n # Now, assign to the key, handling nested AttrDicts properly\n if isinstance(v, dict):\n self.set_key(k, AttrDict(v))\n elif isinstance(v, list):\n self.set_key(k, [i if not isinstance(i, dict) else AttrDict(i)\n for i in v])\n else:\n self.set_key(k, v)",
"def flatten_dict(base, v, d):\n if base != '':\n base = base + \".\"\n for k in d:\n if type(d[k]) in (type('a'), type(u'a'), type(1), type(1.1), type(False), type(None)):\n v[base + k] = d[k]\n elif type(d[k]) in (type([]), type((1,2))):\n v[base + k] = \", \".join(d[k])\n elif type(d[k]) == type({}):\n flatten_dict(base + k, v, d[k])\n else:\n print \"huh,\", type(d[k])",
"def add_by_list_of_keys(dictionary: Dict, key_path: List[Any], value: Any) -> Dict:\n key = key_path[0]\n dictionary[key] = (\n value\n if len(key_path) == 1\n else add_by_list_of_keys(\n dictionary[key] if key in dictionary else dict(),\n key_path[1:],\n value,\n )\n )\n return dictionary",
"def _flatten_dict(self, current, key, result):\n if isinstance(current, dict):\n for k in current:\n new_key = \"{1}\".format(key, k) if len(key) > 0 else k\n self._flatten_dict(current[k], new_key, result)\n else:\n result[key] = current\n return result",
"def _dict_merge(merge_dict, into_dict):\n for k, v in merge_dict.items():\n if k not in into_dict:\n into_dict[k] = v\n continue\n\n current_val = into_dict[k]\n\n if isinstance(v, dict) and isinstance(current_val, dict):\n _dict_merge(v, current_val)\n continue\n\n # otherwise we just overwrite\n into_dict[k] = v",
"def __create_level_entries_dict__(self,\n tree_level_labels,\n tree_level_values,\n ):\n # | - create_level_entries_dict\n level_entries_dict = {}\n for index, variable in enumerate(tree_level_labels):\n level_entries_dict[variable] = tree_level_values[index]\n\n return(level_entries_dict)\n # __|",
"def deepupdate(target, src, overwrite=True):\n for k, v in src.items():\n if type(v) == list:\n if k not in target:\n target[k] = copy.deepcopy(v)\n elif overwrite is True:\n target[k].extend(v)\n elif type(v) == dict:\n if k not in target:\n target[k] = copy.deepcopy(v)\n else:\n deepupdate(target[k], v, overwrite=overwrite)\n elif type(v) == set:\n if k not in target:\n target[k] = v.copy()\n elif overwrite is True:\n if type(target[k]) == list:\n target[k].extend(v)\n elif type(target[k]) == set:\n target[k].update(v)\n else:\n raise TypeError(\"Cannot update {} with {}\".format(\n type(target[k]),\n type(v))\n )\n else:\n if k not in target or overwrite is True:\n target[k] = copy.copy(v)",
"def asdict(v: Any) -> Dict[Any, Any]:\n return to_dict(v, reuse_instances=False, convert_sets=False)",
"def test_deep_append(self):\n sdict = {\"bar\": {\"baz\": [1, 2]}}\n res = dictupdate.append_dict_key_value(sdict, \"bar:baz\", 42)\n self.assertEqual({\"bar\": {\"baz\": [1, 2, 42]}}, res)\n # Append with alternate delimiter\n res = dictupdate.append_dict_key_value(sdict, \"bar~baz\", 43, delimiter=\"~\")\n self.assertEqual({\"bar\": {\"baz\": [1, 2, 42, 43]}}, res)\n # Append to a not-yet existing list\n res = dictupdate.append_dict_key_value({}, \"foo:bar:baz\", 42)\n self.assertEqual({\"foo\": {\"bar\": {\"baz\": [42]}}}, res)"
] | [
"0.6572794",
"0.63907355",
"0.6336366",
"0.62305176",
"0.6214868",
"0.61555994",
"0.59882414",
"0.59860635",
"0.5962133",
"0.590882",
"0.58780855",
"0.5875496",
"0.5871324",
"0.58609086",
"0.58548045",
"0.582082",
"0.5820722",
"0.5794419",
"0.5784228",
"0.5780479",
"0.5768079",
"0.5665515",
"0.56541914",
"0.5653878",
"0.5646067",
"0.5603715",
"0.55895406",
"0.55769056",
"0.5568427",
"0.5537123",
"0.5513097",
"0.5499979",
"0.5484458",
"0.5476103",
"0.5467978",
"0.5460212",
"0.5441916",
"0.5433598",
"0.542156",
"0.5407243",
"0.53871816",
"0.53684264",
"0.53612655",
"0.535905",
"0.53416246",
"0.5334952",
"0.5325524",
"0.5324416",
"0.53229487",
"0.52948356",
"0.5293831",
"0.529145",
"0.5288289",
"0.5275851",
"0.52682585",
"0.525053",
"0.52451366",
"0.52363586",
"0.52165514",
"0.5185421",
"0.51830804",
"0.5163334",
"0.51590556",
"0.5145946",
"0.5142275",
"0.5132129",
"0.5130996",
"0.5129354",
"0.5121864",
"0.5116788",
"0.5104924",
"0.5102638",
"0.50713694",
"0.5063978",
"0.50620484",
"0.50550205",
"0.5038152",
"0.50308466",
"0.5030038",
"0.50136733",
"0.5000076",
"0.49898884",
"0.4989017",
"0.49838236",
"0.49805748",
"0.498001",
"0.49766868",
"0.49731654",
"0.4963711",
"0.49633914",
"0.49594223",
"0.49536785",
"0.49429297",
"0.49412343",
"0.4940066",
"0.49379718",
"0.49338156",
"0.49293056",
"0.4926298",
"0.49225715"
] | 0.70314556 | 0 |
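A matching usage sketch for the `assoc_in` positive above (illustrative; note that the snippet mutates `d` in place and returns it, so the result can be used either way):

profile = {}
assert assoc_in(profile, ["user", "address", "city"], "Oslo") is profile
assert profile == {"user": {"address": {"city": "Oslo"}}}
# existing levels are reused rather than replaced
assoc_in(profile, ["user", "address", "zip"], "0150")
assert profile["user"]["address"] == {"city": "Oslo", "zip": "0150"}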
Returns a function that composes the given functions, applied right to left: `compose(f, g, h)(x, y)` is the same as `f(g(h(x, y)))`. If no arguments are provided, the identity function is returned. | def compose(*funcs):
    if not funcs:
        return identity
    def wrapper(*args, **kwargs):
        # Apply the rightmost function to the arguments first, then fold the
        # remaining functions right to left, so that
        # compose(f, g, h)(x, y) == f(g(h(x, y))) as documented.
        *rest, innermost = funcs
        ret = innermost(*args, **kwargs)
        for f in reversed(rest):
            ret = f(ret)
        return ret
    return wrapper | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compose(f,g):\n def composed(x):\n return f(g(x))\n\n return composed",
"def compose(*functions):\n head, *tail = functions\n return head if not tail else lambda *args, **kwargs: head(compose(*tail)(*args, **kwargs))",
"def compose(f, g):\n return lambda *args, **kwargs: f(g(*args, **kwargs))",
"def compose1(f, g):\n def h(x):\n return f(g(x))\n return h",
"def compose(*fns):\n\n if len(fns) == 0:\n raise ValueError(\"At least one function must be provided\")\n\n def composite(*args):\n x = fns[-1](*args)\n for fn in reversed(fns[0:-1]):\n x = fn(x)\n return x\n\n return composite",
"def compose1(f, g):\n return lambda x: f(g(x))",
"def compose1(f, g):\n def fn(x):\n return f(g(x));\n return fn;",
"def compose(*funcs):\n def _compose(g, f):\n return lambda *args, **kwargs: g(f(*args, **kwargs))\n return reduce(_compose, funcs)",
"def compose(*functions):\n return functools.reduce(lambda f, g: lambda x: f(g(x)), functions, lambda x: x)",
"def compose(\n f: Callable[[Any], Any],\n g: Callable[[Any], Any],\n *functions: Callable[[Any], Any]\n) -> Callable[[Any], Any]:\n fs: Tuple[Callable, ...] = ()\n for h in (f, g) + functions:\n if isinstance(h, Composition):\n fs += h.functions\n else:\n fs += (h,)\n return Composition(fs)",
"def compose(*fns):\n return functools.reduce(lambda f,g: lambda x: f(g(x)), fns)",
"def compose(*funcs):\n if not funcs:\n return identity\n else:\n f0 = funcs[0]\n def composed(_):\n # f_1 o f_2 o ... o f_n\n pre_composed = compose(*funcs[1:])\n return f0(pre_composed(_))\n return composed",
"def compose(f, g):\n def compose_impl(x):\n return bind(g)(f(x))\n\n return compose_impl",
"def compose(*fs) -> Callable:\n return lambda x: reduce(flip(funcall), reversed(fs), x)",
"def compose(*funcs: Callable[[T], T]) -> Callable[[T], T]:\n return functools.reduce(lambda g, f: lambda x: f(g(x)), funcs, lambda x: x)",
"def compose(*fns):\n import functools\n\n def _apply(x, f):\n if isinstance(x, tuple):\n return f(*x)\n else:\n return f(x)\n\n def comp(*args):\n return functools.reduce(_apply, fns, args)\n\n return comp",
"def compose(inner, *others):\n for i, func in enumerate((inner,)+others):\n assert(callable(func)), \"function #{0} is not callable.\".format(i)\n \n accumulator = inner\n for func in others:\n accumulator = _compose(accumulator, func) \n return accumulator",
"def compose(*funcs):\n # return lambda x: reduce(lambda v, f: f(v), funcs, x)\n if funcs:\n return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)\n else:\n raise ValueError(\"Composition of empty sequence not supported.\")",
"def compose(*funcs):\n # return lambda x: reduce(lambda v, f: f(v), funcs, x)\n if funcs:\n return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)\n else:\n raise ValueError('Composition of empty sequence not supported.')",
"def compose(*funcs):\n return reduce(lambda f, g: lambda x: f(g(x)), funcs[::-1])",
"def compose1(*functions: _ComposeArg[_T]) -> _Transform[_T]:\n def composition(arg, **kwargs):\n for f in reversed(functions):\n if isinstance(f, tuple):\n f, kws = f\n arg = f(arg, **{kw: kwargs[kw] for kw in kws})\n else:\n arg = f(arg)\n return arg\n return composition",
"def compose(*funcs):\n if funcs:\n return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)\n else:\n raise ValueError('Composition of empty sequence not supported.')",
"def compose(func_1, func_2, unpack=False):\n if not callable(func_1):\n raise TypeError(\"First argument to compose must be callable\")\n if not callable(func_2):\n raise TypeError(\"Second argument to compose must be callable\")\n \n if unpack:\n def composition(*args, **kwargs):\n return func_1(*func_2(*args, **kwargs))\n else:\n def composition(*args, **kwargs):\n return func_1(func_2(*args, **kwargs))\n return composition",
"def compose(self, *funcs) -> \"fn\":\n return self._mod.compose(self, *funcs)",
"def mcompose(*mfuncs):\n return functools.partial(foldl, bind, tuple(reversed(mfuncs)))",
"def _compose(inner, outer):\n @functools.wraps(outer)\n def composed(*a, **kw ): #pylint: disable=C0111\n return outer(inner(*a, **kw))\n return composed",
"def compose(transforms):\n assert isinstance(transforms, list)\n for transform in transforms:\n assert callable(transform), \"list of functions expected\"\n\n def composition(obj):\n \"Composite function\"\n for transform in transforms:\n obj = transform(obj)\n return obj\n return composition",
"def composition(func_list):\n return reduce(\n lambda (f1, args1), (f2, args2) : (lambda x : f1(f2(x, *args2), *args1)), \n func_list,\n lambda x : x\n )",
"def compose(*funcs: Tuple[Callable[[GT], GS], ...]) -> Callable[[GT], GS]:\n\n def _compose(data):\n result = data\n for i, f in enumerate(funcs):\n\n def _composition_part(r):\n try:\n return f(result)\n except Exception as e:\n raise e\n msg = str(e)\n msg += \"\\nduring composition:\"\n msg += \"\\n ({}) f: {}\".format(i, f)\n msg += \"\\n args: {} {}\".format(result, result.__class__)\n raise type(e)(msg) from e\n\n result = _composition_part(result)\n return result\n\n return _compose",
"def compose(x, funcs, *args, order=\"_order\", **kwargs):\n key = lambda x: getattr(x, order, 0)\n for func in sorted(listify(funcs), key=key):\n x = func(x, *args, **kwargs)\n return x",
"def compose2(a: Callable[[AA], AR], b: Callable[[BA], AA]) -> Callable[[BA], AR]:\n a_name = _name(a)\n b_name = _name(b)\n\n def c(arg: BA) -> AR:\n f\"Function composed as {a_name}({b_name}(_)).\"\n return a(b(arg))\n\n c.__name__ = f\"{a_name}∘{b_name}\"\n return c",
"def composedfun( *args, **kwords ):\n return outerfun( innerfun( *args, **kwords ) )",
"def pipe(*functions):\n\n return reduce(compose, functions, identity)",
"def compose_expanded_args(f,g):\n def composed(*args):\n return f(*(g(*args)))\n\n return composed",
"def lift(func: Callable) -> Callable:\n return lambda f: compose2(func, f)",
"def compose_many(*fs):\n return reduce(compose, fs)",
"def apply_composition_function(x, functions):\n data_type = type(x)\n if len(functions) == 1:\n return data_type(map(functions[0], x))\n else:\n return data_type(map(functions[0], apply_composition_function(x, functions[1:])))",
"def composition(second, first, multiple_returns=True):\n if multiple_returns:\n\n def composed(*args, **kwargs):\n return second(*first(*args, **kwargs))\n\n else:\n\n def composed(*args, **kwargs):\n return second(first(*args, **kwargs))\n\n # add the docstring\n composed.__doc__ = \"\"\"Apply `{first_name}`, and then `{second_name}`, returning \"\"\"\n \"\"\"the result.\n\n Docstring of `{first_name}`:\n\n {first_doc}\n\n Docstring of `{second_name}`:\n\n {second_doc}\n \"\"\".format(\n first_name=first.__name__,\n second_name=second.__name__,\n first_doc=first.__doc__,\n second_doc=second.__doc__,\n )\n # make the __name__ attribute a concatenation of the two functions' names\n composed.__name__ = \"{}_then_{}\".format(first.__name__, second.__name__)\n return composed",
"def pipeline(\n first: Callable[[Any], Any],\n second: Callable[[Any], Any],\n *rest: Callable[[Any], Any]\n) -> Callable[[Any], Any]:\n return compose(*reversed(rest), second, first)",
"def filter_compose(*fns: T.Callable[[T.Any], bool]):\n def composite(x):\n for f in fns:\n if not f(x):\n return False\n return True\n\n return composite",
"def composition(func: Callable, *args, **kwargs):\n\n def decorator(decorated: Callable):\n @wraps(decorated)\n def wrapper(*args_, **kwargs_):\n return func(decorated(*args_, **kwargs_), *args, **kwargs)\n\n return wrapper\n\n return decorator",
"def compose(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_compose(F, G, lev, dom))",
"def compose(*ops):\n if len(ops) == 0:\n return [0, 1, 2, 3, 4, 5, 6, 7]\n if len(ops) == 1:\n return ops[0]\n if len(ops) == 2:\n op1, op2 = ops\n return [op2[op1[v]] for v in range(8)]\n op1 = ops[0]\n rest = ops[1:]\n return compose(op1, compose(*rest))",
"def compose(*coros):\n async def f(*args, **kwargs):\n empty_dict = {}\n for coro in reversed(coros):\n x = coro(*args, **kwargs)\n # Allow mixing corountines and regular functions\n if asyncio.isfuture(x):\n x = await x\n args = [x]\n kwargs = empty_dict\n\n return x\n return f",
"def chain_funcs(funcs):\n return lambda x: reduce(lambda f1, f2: f2(f1), funcs, x)",
"def make_pipeline(steps):\n def compose2(f, g):\n return lambda x: g(f(x))\n return functools.reduce(compose2, steps)",
"def mlift(func):\n return compose(unit, func)",
"def curry2(f):\n return lambda x: lambda y: f(x, y)",
"def composite_identity(f, g):\n return lambda x: f(g(x))== g(f(x))",
"def chain_layer_functions(input_layer, functions):\n return reduce(lambda layer, func: func(layer), functions, input_layer)",
"def bifunctor_composition_law(f, g, x):\n composed_fs = compose(partial(first, f), partial(second, g))\n return bimap(f, g, x) == composed_fs(x)",
"def first_composition_law(f, g, x):\n composed_funcs = compose(g, f)\n composed_firsts = compose(partial(first, g), partial(first, f))\n return first(composed_funcs, x) == composed_firsts(x)",
"def compose(self, other, qargs=None, front=False):\n pass",
"def mapcat(f):\n return compose(map(f), cat)",
"def compose(disp_1, disp_2, indexing='ij'):\n\n assert indexing == 'ij', \"currently only ij indexing is implemented in compose\"\n\n return disp_2 + transform(disp_1, disp_2, interp_method='linear', indexing=indexing)",
"def second_composition_law(f, g, x):\n composed_funcs = compose(g, f)\n composed_seconds = compose(partial(second, g), partial(second, f))\n return second(composed_funcs, x) == composed_seconds(x)",
"def compose_result(\n container: Kind3[_IOResultLikeKind, _FirstType, _SecondType, _ThirdType],\n function: Callable[\n [Result[_FirstType, _SecondType]],\n Kind3[_IOResultLikeKind, _NewFirstType, _SecondType, _ThirdType],\n ],\n) -> Kind3[_IOResultLikeKind, _NewFirstType, _SecondType, _ThirdType]:\n return container.compose_result(function)",
"def compose(transforms):\n trans_objs = [TRANSFORMS.build(t) for t in transforms]\n return tv_transforms.Compose(trans_objs)",
"def zzX_compose_term(f, K):\n def rec_compose(g, l):\n if poly_univariate_p(g):\n return zzx_compose_term(g, K[l])\n\n if K[l] <= 0:\n raise ValueError(\"All 'K[i]' must be positive, got %s\" % K[l])\n\n g = [ rec_compose(c, l+1) for c in g ]\n result, L = [g[0]], poly_level(g) - 1\n\n for coeff in g[1:]:\n for i in xrange(1, K[l]):\n result.append(zzX_zero(L))\n\n result.append(coeff)\n\n return result\n\n if all([ k == 1 for k in K ]):\n return f\n else:\n return rec_compose(f, 0)",
"def lift(f: Callable[..., Data]) -> LiftedFunc:\n def inner(*args: Result) -> Result:\n out = []\n for args1 in itertools.product(*args):\n val = f(*args1)\n out.append(val)\n return out\n return inner",
"def diffeo_compose(a, b):\n c = np.empty_like(a)\n c[:, :, 0] = diffeo_apply(b, a[:, :, 0])\n c[:, :, 1] = diffeo_apply(b, a[:, :, 1])\n return c",
"def make_product(*functions: Callable, exponents: Optional[Sequence] = None) -> Callable:\n if exponents is None:\n exponents = np.ones(shape=(len(functions)))\n else:\n assert len(functions) == len(exponents), 'the length of exponents must be the ' \\\n 'same as the number of given functions'\n\n def product_function(*args, **kwargs):\n return np.prod([functions[i](*args, **kwargs)**exponents[i]\n for i in range(len(exponents))], axis=0)\n\n return product_function",
"def accumulate(*converters):\n #Validation\n if len(converters) == 0:\n converters = (iter,)\n for i, func in enumerate(converters): #tuple of callable\n assert(callable(func)), \"converter #{0} is not callable.\".format(i)\n #Compose all converter functions\n convert = compose(*converters)\n \n @functools.wraps(convert)\n def outer(func): #pylint: disable=C0111\n @functools.wraps(func)\n def inner(*args, **kwargs): #pylint: disable=C0111\n return convert(func(*args, **kwargs))\n return inner\n return outer",
"def two():\n return lambda f: lambda x: f(f(x))",
"def sub_chain_func(chain):\n\n def func(match, *args, **kwargs):\n \"\"\"Processing match objects\n \"\"\"\n\n for sub_func in chain:\n result = sub_func(match, *args, **kwargs)\n if result[1]:\n return result[0]\n\n return match.group(0)\n\n return func",
"def compose_transforms(*transforms):\n from functools import reduce\n\n for transform in transforms:\n vg.shape.check(locals(), \"transform\", (4, 4))\n\n if len(transforms) == 0:\n return np.eye(4)\n\n return reduce(np.dot, reversed(transforms))",
"def lambda_curry2(func):\n return lambda x: lambda y: func(x, y)",
"def cons(a, b):\r\n def pair(f):\r\n return f(a, b)\r\n return pair",
"def currying(f):\n if isfunction(f):\n f = (f, f.__code__.co_argcount, ())\n\n def wrapper_func(*args):\n args = f[2] + tuple(args)\n if len(args) >= f[1]:\n return f[0](*args)\n else:\n return currying((f[0], f[1], args))\n\n return wrapper_func",
"def compose_validators(context_fn, validators):\n\n # Build a list out of the iterable, as it will have to be iterated over\n # every time the composed validator is executed (ie. for each context from\n # context_fn). Otherwise, it may exhaust the first iteration (depending on\n # the type of iterator given).\n validators = list(validators)\n\n @validator_for(context_fn)\n def composed_validator(context):\n for validator in validators:\n validator.validator_fn(context)\n\n return composed_validator",
"def concave_fun_eval(x):\r\n return np.stack([f1(x), f2(x)]), np.stack([f1_dx(x), f2_dx(x)])",
"def ReducePipeline(*funcs, **kwargs):\n def accum(val, func):\n funcArgs = kwargs.get(func.__name__, tuple())\n if hasattr(val, \"__call__\"):\n return func(val(), *funcArgs)\n else:\n return func(val, *funcArgs)\n\n def wrapper(*data):\n newFuncs = (partial(funcs[0], *data),) + funcs[1:]\n return reduce(accum, newFuncs)\n return wrapper",
"def bimap_composition_law(f, g, h, i, x):\n fg = compose(f, g)\n hi = compose(h, i)\n composed_bimap = compose(partial(bimap, f, h), partial(bimap, g, i))\n return bimap(fg, hi, x) == composed_bimap(x)",
"def multichannel(fcn):\n return lambda args: (fcn(*args), )",
"def curry(func):\n @wraps(func)\n def _curry(*args, **kwargs):\n f = func\n count = 0\n while isinstance(f, partial):\n if f.args:\n count += len(f.args)\n f = f.func\n\n spec = getargspec(f)\n\n if count == len(spec.args) - len(args):\n return func(*args, **kwargs)\n \n para_func = partial(func, *args, **kwargs)\n update_wrapper(para_func, f)\n return curry(para_func)\n \n return _curry",
"def carry(function):\n\n def dec_1(first_arg):\n def dec_2(second_arg):\n def dec_3(third_arg):\n def dec_4(fourth_arg):\n print(first_arg + second_arg +\n third_arg + fourth_arg)\n return function\n\n return dec_4\n\n return dec_3\n\n return dec_2\n\n return dec_1",
"def _apply_in_order(functions, input_data):\n return reduce(lambda res, func: func(res), functions, input_data)",
"def imap_c(func):\n return functools.partial(imap, func)",
"def curry(f: Callable) -> Callable:\n @functools.wraps(f)\n def decorator(*args: object, **kwargs: object) -> Any:\n return Curry(f)(*args, **kwargs)\n\n return decorator",
"def one():\n return lambda f: lambda x: f(x)",
"def curry(func):\n def curried(*args, **kwargs):\n return func(args, **kwargs)\n return curried",
"def call(f):\n def g(*args, **kwds):\n return (f, args, kwds)\n return g",
"def flip(f: Callable[[A, B], Any]) -> Callable[[B, A], Any]:\n return lambda x, y: f(y, x)",
"def apply_function(f, args):\n if len(signature(f).parameters) == len(args):\n func = curry(f)\n for arg_value in args:\n func = func(arg_value)\n return func()\n else:\n raise Exception(\"the number of function's parameter is not matched args, len(args): \", len(args))",
"def _get_intermediate_simp(deffunc=lambda x: x, offfunc=lambda x: x,\n onfunc=_dotprodsimp, dotprodsimp=None):\n\n if dotprodsimp is False or _dotprodsimp_state.state is False:\n return offfunc\n if dotprodsimp is True or _dotprodsimp_state.state is True:\n return onfunc\n\n return deffunc # None, None",
"def apply(self, *input_):\n result = None\n for function in reversed(self._functions):\n if result is None:\n result = function(*input_)\n else:\n result = function(result)\n return result",
"def test_compose_front(self):\n # Random input test state\n rho = self.rand_rho(2)\n\n # UnitaryChannel evolution\n chan1 = Chi(self.chiX)\n chan2 = Chi(self.chiY)\n chan = chan2.compose(chan1, front=True)\n targ = Chi(self.chiZ)._evolve(rho)\n self.assertAllClose(chan._evolve(rho), targ)\n\n # Compose random\n chi1 = self.rand_matrix(4, 4, real=True)\n chi2 = self.rand_matrix(4, 4, real=True)\n chan1 = Chi(chi1, input_dims=2, output_dims=2)\n chan2 = Chi(chi2, input_dims=2, output_dims=2)\n targ = chan2._evolve(chan1._evolve(rho))\n chan = chan2.compose(chan1, front=True)\n self.assertEqual(chan.dim, (2, 2))\n self.assertAllClose(chan._evolve(rho), targ)",
"def build_transforms(transforms_config: List[Dict[str, Any]]) -> Callable:\n transform_list = [build_transform(config) for config in transforms_config]\n return transforms.Compose(transform_list)",
"def lambda_curry2(func):\n def fn(x):\n return lambda y: func(x, y);\n return fn;",
"def dispatch_function(func: Callable[..., Hashable]) -> Callable[..., Callable[..., Any]]:\n\n registry = {}\n\n def dispatch(key):\n if key not in registry:\n raise KeyError('Unknown dispatch key')\n return registry[key]\n\n def inner(*args, **kwargs):\n return dispatch(func(*args, **kwargs))(*args, **kwargs)\n\n def register(key, target=None):\n if target is None:\n return lambda f: register(key, f)\n registry[key] = target\n return target\n\n inner.register = register\n return inner",
"def multimap(funcs, iterable):\n\n for f in funcs:\n iterable = map(f, iterable)\n\n return iterable",
"def flip(f: Callable) -> Callable:\n return curry(lambda *args, **kwargs: f(*reversed(args), **kwargs))",
"def tee_pipe(*funcs: Tuple[Callable[[GT], GS], ...]) -> Callable[[GT], GT]:\n\n piped = compose(*funcs)\n\n def _tee_pipe(arr):\n a, b = itertools.tee(arr)\n piped(a)\n return b\n\n return _tee_pipe",
"def make_linear_combination(*functions: Callable, weights: Optional[Sequence] = None) -> Callable:\n if weights is None:\n weights = np.ones(shape=(len(functions)))\n else:\n assert len(functions) == len(weights), 'the length of weights must be the ' \\\n 'same as the number of given functions'\n\n def linear_combination(*args, **kwargs):\n return sum((weights[i]*functions[i](*args, **kwargs) for i in range(len(weights))))\n\n return linear_combination",
"def _pairing_func(x, a, b, c, d, e, f):\n y = a * np.exp(-((b * x) ** c)) + d * np.exp(-((e * x) ** f))\n\n return y",
"def _pairing_func(x, a, b, c, d, e, f):\n y = a * np.exp(-(b * x)**c) + d * np.exp(-(e * x)**f)\n\n return y",
"def add(m, n):\n return lambda f: lambda x: m(f)(\n n(f)(x)\n )",
"def generic(combiner=None):\n\n from dispatch.functions import GenericFunction, AbstractGeneric\n from peak.util.decorators import decorate_assignment\n\n\n\n\n if combiner is None:\n def callback(frm,name,value,old_locals):\n return GenericFunction(value).delegate\n elif isinstance(combiner,_cls) and issubclass(combiner,AbstractGeneric):\n def callback(frm,name,value,old_locals):\n return combiner(value).delegate\n else:\n def callback(frm,name,value,old_locals):\n gf = GenericFunction(value)\n gf.combine = combiner\n return gf.delegate\n\n return decorate_assignment(callback)",
"def f_unc(x, a, b):\n return f_raw(x, a, b)",
"def reducer(functions, init_value):\n return reduce(lambda res, func: func(res), functions, init_value)"
] | [
"0.81175",
"0.7936012",
"0.7929636",
"0.7896892",
"0.7852169",
"0.7835173",
"0.7823427",
"0.78121835",
"0.770165",
"0.7692192",
"0.76420945",
"0.7633142",
"0.758357",
"0.75239193",
"0.7490701",
"0.7352416",
"0.73518074",
"0.72612894",
"0.7251908",
"0.7217228",
"0.7167121",
"0.7161701",
"0.71265584",
"0.70775187",
"0.70261246",
"0.69543576",
"0.69515145",
"0.692601",
"0.67915213",
"0.67433625",
"0.66788083",
"0.65897965",
"0.6522483",
"0.6448424",
"0.63213",
"0.6251861",
"0.62451416",
"0.620738",
"0.6141059",
"0.6117234",
"0.6074914",
"0.60677266",
"0.5992106",
"0.59655946",
"0.5693025",
"0.5591725",
"0.5581494",
"0.55145276",
"0.54963136",
"0.54464775",
"0.54320097",
"0.5420432",
"0.53169245",
"0.5242636",
"0.52040374",
"0.518089",
"0.5146899",
"0.5144732",
"0.512521",
"0.5117253",
"0.50763094",
"0.50400484",
"0.5038225",
"0.5038014",
"0.49867466",
"0.49673006",
"0.49490353",
"0.49442518",
"0.4933605",
"0.49193153",
"0.4902391",
"0.48968956",
"0.48901242",
"0.48848882",
"0.48809528",
"0.4864244",
"0.4808455",
"0.48029262",
"0.47934887",
"0.4785965",
"0.4750614",
"0.47352037",
"0.47038814",
"0.47014007",
"0.4688603",
"0.46867618",
"0.46687147",
"0.4668517",
"0.46524742",
"0.46444374",
"0.46418792",
"0.46406782",
"0.46398494",
"0.4637883",
"0.46238357",
"0.46227598",
"0.46206465",
"0.4604492",
"0.4601849",
"0.4588685"
] | 0.7443417 | 15 |
Print a `middleware_name` with a right arrow if `_VERBOSE_MODE` is on. | def _print_inwards(middleware_name):
if _VERBOSE_MODE:
print('{}--->'.format(middleware_name)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def _verboseHeader(self):\n\n if verbose:\n name = self._getName()\n methodName = self._getMethodName()\n\n title = f\"Running {name}.{methodName}\"\n print('{}\\n{}'.format(title, '-' * len(title)))",
"def v_print(msg):\n if (VERBOSE == 1):\n print(msg)",
"def showsession():\n for property,value in middleware.__dict__.items():\n if property.startswith('_') and not callable(property): continue\n if property in ['ixn', 'portMgmtObj', 'fileMgmtObj', 'protocolObj', 'statsObj']: continue\n print('\\t{0}: {1}'.format(property, value))",
"def show(self, *args, prefix=None):\n if prefix is None:\n prefix = '$'\n if self.verbose >= 2:\n print(prefix, *args)",
"def print_debug(message):\n if current_app.debug:\n print(message)",
"def verbose_print(msg: str = '') -> None:\n assert isinstance(msg, str)\n if __verbose:\n print(msg)",
"def cli(debug):\n print(f\"Debug mode is {'on' if debug else 'off'}\")",
"def DEBUG(*args, **kwargs):\n if __name__ != \"__main__\":\n print(*args, **kwargs)",
"def debug_print(self, *args, **kwargs):\n print(\"APP_DEBUG_PRINT\", args, kwargs)",
"def debug(string):\n if verbose:\n print string\n return",
"def show_details(name, f, is_partial=False):\n print '%s:' % name\n print '\\tobject:', f\n if not is_partial:\n print '\\t__name__:', f.__name__\n print '\\t__doc__', repr(f.__doc__)\n if is_partial:\n print '\\tfunc:', f.func\n print '\\targs:', f.args\n print '\\tkeywords:', f.keywords\n return",
"def debug():\n def _debug(x):\n return e.String(x.as_source())\n yield (\"(λ any . str)\", _debug)",
"def vprint(string):\n global verbose\n if verbose:\n print(string)",
"def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def vprint(msg):\n if defaults.verbose:\n print(msg)",
"def verbose_print(verbose, print_function=None):\n\n if verbose:\n return print_function or print\n else:\n def vprint(*args, **kwars):\n pass\n return vprint",
"def __debugInfo(self, msg):\n\t\tif self.verbosity:\n\t\t\tprint(stylize(\"[*] DEBUG: {}\".format(msg), colored.fg(\"wheat_1\")))",
"def debug(msg):\n if settings.DEBUG:\n print \"DEBUG: cli.%(msg)s\" % locals()",
"def _verbose(self,text):\n if self.verbose:\n print(text)",
"def write_debug_info(self):\n #path = self.request.uri.split('?')[0]\n #method = path.split('/')[-1]\n \n self.write(\"Handler: \" + str(self.__class__.__name__)+\"<br>\")\n self.write(\"<hr>\")\n self.write(str(dir(self.request)))\n self.write(\"<br><hr>\")\n self.write(\"query_arguments:\" + str(self.request.query_arguments))\n self.write(\"<br>\")\n self.write(\"uri:\" + self.uri)\n self.write(\"<br>\")\n self.write(\"path:\" + self.path)\n self.write(\"<br>\")\n self.write(\"method to call: \" + self.request.method.lower() + \"_\" + self.method)\n self.write(\"<hr>\")\n self.write(\"request method: \" + self.request.method)\n self.write(\"<hr>\")\n self.write(\"request headers: \" + str(self.request.headers))\n self.write(\"<hr>\")\n self.flush()",
"def print_debug(context: str = \"\") -> None:\r\n print(context)\r\n print(\"This is the current board\")\r\n print(example)\r\n print(\"This is the conflict space\")\r\n print(conflict_space)\r\n print(\"This is the safeboard\")\r\n print(safeboard)",
"def vprint(*args, **kwargs ):\n\n forceprint = False\n for key in kwargs:\n if key == \"forceprint\":\n forceprint =kwargs[key]\n \n line = ''\n if debug or forceprint : \n for arg in args:\n line += str(arg) +\" \"\n log = open(exepath + 'pyframe.log', 'a') \n log.write(line + \"\\n\")\n log.close() \n print line",
"def _vprint(self, string):\n if self.verbose:\n print(string)",
"def verbose_print(text,verbose_level):\n if Args.verbose >= verbose_level:\n print '\\t' * (verbose_level-1) + text",
"def debug_print(debug_data):\n if DEBUG_MODE == \"true\":\n pp.pprint(debug_data)",
"def main(debug):\n click.echo('Debug mode is {{}}'.format(debug))",
"def print_verbose(self) -> None:\n print(self)\n if self.meta is not None:\n print(self.meta.__repr__())",
"def print_debug(msg):\n if IS_DEBUG:\n print(msg)",
"def debug_print(text):\r\n if settings.debug:\r\n print (text)",
"def output_debug_info(self):",
"def print_debug(message: str):\n global debug\n if debug:\n print(\"%s%s%s\" % (KCYN, message, KNRM))",
"def debugargs(prefix='***'):\n def debug(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n print(prefix + ': ' + func.__qualname__)\n return func(*args, **kwargs)\n return wrapper\n return debug",
"def _debugGenerator(self, generatorName, obj):\n\n debug.println(debug.LEVEL_FINER,\n \"GENERATOR: %s\" % generatorName)\n debug.println(debug.LEVEL_FINER,\n \" obj = %s\" % obj.name)\n debug.println(debug.LEVEL_FINER,\n \" role = %s\" % obj.getRoleName())",
"def show_trailer(self):",
"def print_info(message: str):\n global verbose\n if verbose:\n print(\"%s%s%s\" % (KYEL, message, KNRM))",
"def _print(self, *args, **kwargs) -> None:\n # Only print in verbose mode\n if self._verbose:\n arglist = list(args)\n arglist[0] = f\"[buddy-{self._experiment_name}] {args[0]}\"\n print(*arglist, **kwargs)",
"def curried_printer(*args):\n if pretty:\n pretty_log(function_name, args)\n else:\n generic_log(function_name, args)\n return function_name",
"def print_trace(view_func):\r\n @wraps(view_func, assigned=available_attrs(view_func))\r\n def _wrapped_view_func(request, *args, **kwargs):\r\n try:\r\n return view_func(request, *args, **kwargs)\r\n except:\r\n import traceback\r\n traceback.print_exc()\r\n return _wrapped_view_func",
"def test_middleware_loads(self):\n self.client.get(\"/__debug__\")",
"def debug_print(self):\n print self.title\n print self.storyline\n print self.poster_image_url\n print self.trailer_youtube_url\n print \"------\"",
"def print_debug(obj):\n\n if not DEBUG:\n return False\n\n if hasattr(obj, 'lower'):\n # string/unicode... just print it.\n print('Debug: {}'.format(obj))\n elif isinstance(obj, (list, tuple)):\n # list/tuple, format it...\n header = 'Debug: '\n spaces = (' ' * len(header))\n if obj:\n print('{}{}'.format(header, obj[0]))\n if len(obj) > 1:\n otherlines = '\\n{}'.format(spaces).join(obj[1:])\n print('\\n{}{}'.format(spaces, otherlines))\n else:\n # different type of object\n print('Debug: {!r}'.format(obj))\n\n return True",
"def showconnecttoapiserver():\n print('\\n{0}'.format(middleware.preference.apiServerType))",
"def DEBUG_PRINT(msg, obj='', suffix=''):\n if PRINT_DEBUGS:\n print msg, obj, suffix",
"def debug_print(msg):\n\n if not MKL.MKL_DEBUG:\n return\n else:\n print(msg)",
"def output_debug(text):\n if conf.debug:\n output_message('[DEBUG] ' + text)",
"def test_pydotprint_long_name():\r\n\r\n # Skip test if pydot is not available.\r\n if not theano.printing.pydot_imported:\r\n raise SkipTest('pydot not available')\r\n\r\n x = tensor.dvector()\r\n mode = theano.compile.mode.get_default_mode().excluding(\"fusion\")\r\n f = theano.function([x], [x * 2, x + x], mode=mode)\r\n f([1, 2, 3, 4])\r\n\r\n s = StringIO()\r\n new_handler = logging.StreamHandler(s)\r\n new_handler.setLevel(logging.DEBUG)\r\n orig_handler = theano.logging_default_handler\r\n\r\n theano.printing.pydotprint(f, max_label_size=5,\r\n print_output_file=False,\r\n assert_nb_all_strings=6)",
"def vprint (*args, take_action=False, **kwargs):\n\n take_action = take_action and not opts.take_action\n\n if opts.verbose or take_action:\n print (*args, **kwargs)\n\n return take_action",
"def _debug_print(message):\n\n if _debug == True:\n print(message)",
"def verbose(string, level, indent=None):\n if args.verbose:\n if args.verbose > level:\n if indent is None:\n if level <= LEVEL_4:\n indent = \" \" * level\n else:\n indent = \" \"\n print (indent + string)\n return",
"def middleware(name, *args, **kwargs):\n\n def new_annotate(g_fn):\n def new_middleware(handler):\n def new_handler(ctx):\n _print_inwards(name)\n\n g = g_fn(ctx, *args, **kwargs)\n\n changed_ctx = next(g)\n new_ctx = handler(changed_ctx)\n last_ctx = g.send(new_ctx)\n\n _print_outwards(name)\n\n return last_ctx\n\n return new_handler\n\n return new_middleware\n\n return new_annotate",
"def printv(self, string, **kwargs):\n if self.verbose:\n print(string, **kwargs)",
"def show(self,verbose=0):\n print 'inferenceArgs',self.ws.inferenceArgs\n print 'inferenceExpr',theano.pp(self.ws.inferenceExpr)\n if verbose>=1:\n print 'debugprint inferenceExpr:'\n theano.printing.debugprint(self.ws.inferenceExpr)\n if self.ws.dataLossExpr:\n print 'dataLossArgs',self.ws.dataLossArgs\n print 'dataLossExpr',theano.pp(self.ws.dataLossExpr)\n print 'debugprint dataLossExpr:'\n theano.printing.debugprint(self.ws.dataLossExpr)",
"def create_print(prefix):\n def inner(*args):\n print prefix + str(args)\n return inner",
"def debug(version=False):\n if version:\n print(__version__)\n return\n print(json.dumps(MANAGE_DICT, indent=2))",
"def d_print(msg):\n if (DEBUG == 1):\n print(msg)",
"def __debug(msg):\n\n pass",
"def debug(func):\n if VERBOSE > 0:\n @functools.wraps(func)\n def wrapper_debug(*args, **kwargs):\n args_repr = [repr(a) for a in args]\n kwargs_repr = [f\"{k}={v!r}\" for k, v in kwargs.items()]\n signature = \", \".join(args_repr + kwargs_repr)\n\n print(f\"Calling {func.__name__}({signature})\\n\")\n value = func(*args, **kwargs)\n print(f\"{func.__name__!r} returned {value!r}\\n\")\n\n return value\n\n return wrapper_debug\n else:\n return func",
"def debug_print(*a):\n if enable_debug_output:\n print(' '.join(map(str, a)))",
"def known_verbose_name():\n return 'test Verbose name'",
"def printv(self, *arg):\n if self.verbose:\n print(*arg)",
"def print_verbose(message:str):\n if params['verbose']:\n print(message)\n return",
"def debugPrint(text: str):\r\n if DEBUG:\r\n print(text)",
"def do_showlocals(self, line):\n if(Rsp.state != STOPPED):\n self.output = \"Command only possible during STOPPED-state.\"\n return\n curfunc = get_func(Rsp.pc) \n self.output = \"Funktion:%s\"%curfunc\n stackmap = funcmap[curfunc].stacklocals\n regmap = funcmap[curfunc].reglocals\n for var in stackmap:\n self.output += \"%s:%s\\n\"%(var, typemap[stackmap[var].type].name) \n for var in regmap:\n self.output += \"%s:%s\\n\"%(var, typemap[regmap[var].type].name)",
"def verbose(ctx, msg, *args):\n if ctx.verbose:\n info(msg, *args)",
"def get_debug(quiet):\n if quiet:\n def debug(*args, **kwargs):\n pass\n else:\n def debug(*args, **kwargs):\n print(*args, **kwargs)\n return debug",
"def debug_print(function):\n def debug(thing):\n print(function(thing))\n return thing\n return debug",
"def debug(func):\n\n @functools.wraps(func)\n def decorated(*args, **kwargs):\n if args and not kwargs:\n print(\"~ input of {}: args: {}\".format(func.__name__, args))\n elif not args and kwargs:\n print(\"~ input of {}: kwargs: {}\".format(func.__name__, kwargs))\n elif args and kwargs:\n print(\"~ input of {}: args: {}, kwargs: {}\".format(func.__name__, args, kwargs))\n else:\n print(\"~ input of {}: NO_ARGS\".format(func.__name__))\n output = func(*args, **kwargs) # stores the result of the function\n print(\"~ output of {}:\".format(func.__name__), output)\n return output\n\n return decorated",
"def pprint(obj):\n for argname in sorted([x for x in dir(obj) if not x.startswith('__')]):\n # Skip callables\n if hasattr(getattr(obj, argname), '__call__'):\n continue\n print(\"{} : {}\".format(argname, getattr(obj, argname)))",
"def print_verbose(args, msg):\n if args.verbose:\n print(msg)",
"def debug(cls, message):\n if cls.verbose:\n print('[DEBUG] {0}'.format(message))",
"def trace(msg, minLevel=1):\n global verbose\n if verbose >= minLevel:\n tracePrint(msg)",
"def debug():",
"def _p(self, *args, level=2, **kwargs):\n if self._verbosity >= level:\n print(*args, **kwargs)",
"def is_verbose() -> bool:\n return VERBOSE",
"def _show(self, indent = 0):\n print(\" \"*indent, \"Name:\", self.name)\n print(\" \"*indent, \"Description:\", self.description)",
"def _print_matrix_info(mtrx, name):\r\n pr = lambda t: print(\"ht3_solver:\\t\" + t)\r\n pr(\"MATRIX INFO:\")\r\n pr(\"Matrix:\\t\" + name)\r\n pr(\"Description:\\t\" + str(mtrx.description))\r\n pr(\"Shape:\\t\" + str(mtrx.shape))",
"def debug(string):\n if conf.DEBUG:\n outputs.print_debug(string)",
"def _pretty_print_token(self, token):\n INLINE = 0\n BOL = 1\n extended_print = ('ID', 'INT', 'FLOAT', 'STRING')\n next_line_tokens = ('NEWLINE', 'INDENT', 'DEDENT')\n\n if self.printer_state == BOL:\n self.printer_state = INLINE\n\n print(str(token.lineno) + self.level * \" \", end=' ')\n\n if token is None:\n pass\n elif token.type in next_line_tokens:\n if token.type == \"INDENT\":\n self.level += 1\n elif token.type == \"DEDENT\":\n self.level -= 1\n\n print(token.type + '\\n', end=' ')\n self.printer_state = BOL\n elif token.type in extended_print:\n print('(' + token.type + ', ' + str(token.value) + ')', end=' ')\n else:\n print(token.type, end=' ')",
"def debug_print(self, *content):\n if self.debug:\n print(*content)",
"def debug(self) -> str:\n\n return Widget.debug(self)",
"def debug(self, msg=\"\"):\n if self.verbose:\n print(\"Debug: \" + msg)",
"def debug_decorator(func):\n\n def wrapper(*args, **kwargs):\n\n from main_loop import debug_mode\n\n if debug_mode:\n\n game_logger.logging.debug(\"Function name: \" + func.__name__)\n\n game_logger.logging.debug(\"Args: \")\n game_logger.logging.debug(args)\n\n game_logger.logging.debug(\"Kwargs: \")\n game_logger.logging.debug(kwargs)\n\n return func(*args, **kwargs)\n\n return wrapper",
"def debug(message: str) -> None:\n if is_verbose():\n print(message)\n else:\n sys.stdout.write(\".\")\n sys.stdout.flush()",
"def f_Dumpfname(func):\n @wraps(func)\n def echo_func(*func_args, **func_kwargs):\n if DEBUG: print('func \\033[1;31m {}()\\033[0m called by \\033[1;31m{}() \\033[0m'.format(func.__name__,sys._getframe(1).f_code.co_name))\n return func(*func_args, **func_kwargs)\n return echo_func",
"def vprint(expr, **settings):\n\n outstr = vsprint(expr, **settings)\n\n import builtins\n if (outstr != 'None'):\n builtins._ = outstr\n print(outstr)",
"def print_debug(msg, category, stepsback=1):\n if debug.is_on(category):\n if config.cfg.helpful_debugging:\n # Get caller info\n fn, lineno, funcnm = inspect.stack()[stepsback][1:4]\n to_print = colour.cstring(\"DEBUG %s [%s:%d - %s(...)]:\\n\" %\n (category.upper(),\n os.path.split(fn)[-1], lineno,\n funcnm), 'debughdr')\n msg = msg.replace('\\n', '\\n ')\n to_print += colour.cstring(\" %s\" % msg, 'debug')\n else:\n to_print = colour.cstring(msg, 'debug')\n sys.stderr.write(to_print + '\\n')\n sys.stderr.flush()",
"def print_frame(self, name, frame, on_svr=False):\n\n name = \"print{}: {}\".format(self.step, name)\n\n # print using svr\n if on_svr:\n svr.debug(name, frame)\n\n # print using openCV\n else: \n self.debug_stream(name, frame)\n\n # increment step counter\n self.step += 1",
"def show_class_details(name, f):\n print '%s:' % name\n print '\\tobject:', f\n print '\\t__name__:', \n try:\n print f.__name__\n except AttributeError:\n print '(no __name__)'\n print '\\t__doc__', repr(f.__doc__)\n return",
"def debug_print(*args, sep=' ', end='\\n', file=sys.stdout, flush=False, lvl=1):\n if debuglvl >= lvl:\n print(*args, sep=sep, end=end, file=file, flush=flush)",
"def dprint(msg):\n if defaults.debug:\n print('debug: %s' % msg)",
"def debug_print(*items):\n\t\n\tif items and app.config['DEBUG']:\n\t\tprint ' '.join([str(item) for item in items])",
"def show_reflector(self):\n\n return self.reflector.reflector_name",
"def verbose():\n return _verbose",
"def trace(filler):\n @decorator\n def dec(func):\n def wrapper(*args, **kwargs):\n indent = filler * wrapper.level\n arguments = ', '.join(str(x) for x in args)\n print('{} --> {}({})'.format(indent, func.__name__, arguments))\n wrapper.level += 1\n\n result = func(*args, **kwargs)\n print('{} <-- {}({}) == {}'.format(indent, func.__name__, arguments, result))\n wrapper.level -= 1\n return result\n wrapper.level = 0\n return wrapper\n return dec",
"def dprint(msg, debug):\n if debug:\n six.print_(msg)",
"def verbosePrint(string, nonl=False):\n if not verbose:\n return\n if nonl:\n print(string, end=' ')\n else:\n print(string)"
] | [
"0.8443991",
"0.59464276",
"0.5638737",
"0.560429",
"0.5601479",
"0.5314763",
"0.5303445",
"0.5292417",
"0.528729",
"0.5279358",
"0.52545273",
"0.5227158",
"0.5212542",
"0.5212174",
"0.52042764",
"0.51971495",
"0.5194814",
"0.5194814",
"0.5190525",
"0.5150018",
"0.5143002",
"0.5138631",
"0.5076536",
"0.5073388",
"0.5059901",
"0.50518113",
"0.5045145",
"0.50286466",
"0.5023738",
"0.5022795",
"0.5012875",
"0.50118715",
"0.5007756",
"0.5004641",
"0.49988776",
"0.49922383",
"0.49920073",
"0.49595347",
"0.49478707",
"0.49395904",
"0.4925171",
"0.49206284",
"0.49138156",
"0.48939925",
"0.48783284",
"0.48602587",
"0.48544776",
"0.48328745",
"0.48323095",
"0.48302248",
"0.48292142",
"0.48274156",
"0.48273593",
"0.48218566",
"0.4802637",
"0.47989216",
"0.47986862",
"0.47935036",
"0.47842658",
"0.4777876",
"0.4773351",
"0.47692257",
"0.47501746",
"0.47487694",
"0.47485536",
"0.47472137",
"0.47394848",
"0.4739213",
"0.473699",
"0.47365537",
"0.47349733",
"0.47310463",
"0.47284436",
"0.47248244",
"0.4716372",
"0.47150576",
"0.4709113",
"0.47042012",
"0.47038022",
"0.4694043",
"0.46790117",
"0.46786493",
"0.46701682",
"0.46688277",
"0.46673036",
"0.4662279",
"0.46538925",
"0.4651671",
"0.46492976",
"0.46459863",
"0.46425214",
"0.4641169",
"0.46399748",
"0.46395814",
"0.4637177",
"0.46365276",
"0.46360266",
"0.46313575",
"0.46287036",
"0.4627421"
] | 0.85856086 | 0 |
Print a `middleware_name` with a left arrow if `_VERBOSE_MODE` is on. | def _print_outwards(middleware_name):
if _VERBOSE_MODE:
print('<---{}'.format(middleware_name)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def _verboseHeader(self):\n\n if verbose:\n name = self._getName()\n methodName = self._getMethodName()\n\n title = f\"Running {name}.{methodName}\"\n print('{}\\n{}'.format(title, '-' * len(title)))",
"def showsession():\n for property,value in middleware.__dict__.items():\n if property.startswith('_') and not callable(property): continue\n if property in ['ixn', 'portMgmtObj', 'fileMgmtObj', 'protocolObj', 'statsObj']: continue\n print('\\t{0}: {1}'.format(property, value))",
"def v_print(msg):\n if (VERBOSE == 1):\n print(msg)",
"def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return",
"def show(self, *args, prefix=None):\n if prefix is None:\n prefix = '$'\n if self.verbose >= 2:\n print(prefix, *args)",
"def DEBUG(*args, **kwargs):\n if __name__ != \"__main__\":\n print(*args, **kwargs)",
"def debug():\n def _debug(x):\n return e.String(x.as_source())\n yield (\"(λ any . str)\", _debug)",
"def print_debug(message):\n if current_app.debug:\n print(message)",
"def cli(debug):\n print(f\"Debug mode is {'on' if debug else 'off'}\")",
"def debug(msg):\n if settings.DEBUG:\n print \"DEBUG: cli.%(msg)s\" % locals()",
"def debug_print(self, *args, **kwargs):\n print(\"APP_DEBUG_PRINT\", args, kwargs)",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def debug(string):\n if verbose:\n print string\n return",
"def debugargs(prefix='***'):\n def debug(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n print(prefix + ': ' + func.__qualname__)\n return func(*args, **kwargs)\n return wrapper\n return debug",
"def verbose_print(msg: str = '') -> None:\n assert isinstance(msg, str)\n if __verbose:\n print(msg)",
"def main(debug):\n click.echo('Debug mode is {{}}'.format(debug))",
"def __debugInfo(self, msg):\n\t\tif self.verbosity:\n\t\t\tprint(stylize(\"[*] DEBUG: {}\".format(msg), colored.fg(\"wheat_1\")))",
"def print_debug(msg):\n if IS_DEBUG:\n print(msg)",
"def print_debug(context: str = \"\") -> None:\r\n print(context)\r\n print(\"This is the current board\")\r\n print(example)\r\n print(\"This is the conflict space\")\r\n print(conflict_space)\r\n print(\"This is the safeboard\")\r\n print(safeboard)",
"def vprint(msg):\n if defaults.verbose:\n print(msg)",
"def vprint(string):\n global verbose\n if verbose:\n print(string)",
"def vprint(*args, **kwargs ):\n\n forceprint = False\n for key in kwargs:\n if key == \"forceprint\":\n forceprint =kwargs[key]\n \n line = ''\n if debug or forceprint : \n for arg in args:\n line += str(arg) +\" \"\n log = open(exepath + 'pyframe.log', 'a') \n log.write(line + \"\\n\")\n log.close() \n print line",
"def show_details(name, f, is_partial=False):\n print '%s:' % name\n print '\\tobject:', f\n if not is_partial:\n print '\\t__name__:', f.__name__\n print '\\t__doc__', repr(f.__doc__)\n if is_partial:\n print '\\tfunc:', f.func\n print '\\targs:', f.args\n print '\\tkeywords:', f.keywords\n return",
"def create_print(prefix):\n def inner(*args):\n print prefix + str(args)\n return inner",
"def debug_print(msg):\n\n if not MKL.MKL_DEBUG:\n return\n else:\n print(msg)",
"def debug_print(debug_data):\n if DEBUG_MODE == \"true\":\n pp.pprint(debug_data)",
"def write_debug_info(self):\n #path = self.request.uri.split('?')[0]\n #method = path.split('/')[-1]\n \n self.write(\"Handler: \" + str(self.__class__.__name__)+\"<br>\")\n self.write(\"<hr>\")\n self.write(str(dir(self.request)))\n self.write(\"<br><hr>\")\n self.write(\"query_arguments:\" + str(self.request.query_arguments))\n self.write(\"<br>\")\n self.write(\"uri:\" + self.uri)\n self.write(\"<br>\")\n self.write(\"path:\" + self.path)\n self.write(\"<br>\")\n self.write(\"method to call: \" + self.request.method.lower() + \"_\" + self.method)\n self.write(\"<hr>\")\n self.write(\"request method: \" + self.request.method)\n self.write(\"<hr>\")\n self.write(\"request headers: \" + str(self.request.headers))\n self.write(\"<hr>\")\n self.flush()",
"def debug_print(text):\r\n if settings.debug:\r\n print (text)",
"def middleware(name, *args, **kwargs):\n\n def new_annotate(g_fn):\n def new_middleware(handler):\n def new_handler(ctx):\n _print_inwards(name)\n\n g = g_fn(ctx, *args, **kwargs)\n\n changed_ctx = next(g)\n new_ctx = handler(changed_ctx)\n last_ctx = g.send(new_ctx)\n\n _print_outwards(name)\n\n return last_ctx\n\n return new_handler\n\n return new_middleware\n\n return new_annotate",
"def print_debug(message: str):\n global debug\n if debug:\n print(\"%s%s%s\" % (KCYN, message, KNRM))",
"def printHeader(self,debugLevel=0):\n if self.headerLogger.level==logging.DEBUG:\n self.getCallerParams()\n self.headerLogger.debug('Calling method %s with arguments %s'%(self.callerName,self.callerLocals))\n if ((debugLevel==0) or \\\n (self.callerName in self.settings.debugAllowed) or ('all' in self.settings.debugAllowed)) \\\n and ((debugLevel in self.settings.config.getList(\"log\", \"debugAllowedLevels\") )) \\\n and (self.callerName not in self.settings.config.getList(\"log\", \"debugRestricted\")):\n print 'Calling method %s with arguments %s'%(self.callerName,self.callerLocals)\n #else hiddenMessagesLog.append(message) # Dropped in 0.24 because of loggers.",
"def _debugGenerator(self, generatorName, obj):\n\n debug.println(debug.LEVEL_FINER,\n \"GENERATOR: %s\" % generatorName)\n debug.println(debug.LEVEL_FINER,\n \" obj = %s\" % obj.name)\n debug.println(debug.LEVEL_FINER,\n \" role = %s\" % obj.getRoleName())",
"def test_middleware_loads(self):\n self.client.get(\"/__debug__\")",
"def verbose_print(text,verbose_level):\n if Args.verbose >= verbose_level:\n print '\\t' * (verbose_level-1) + text",
"def verbose_print(verbose, print_function=None):\n\n if verbose:\n return print_function or print\n else:\n def vprint(*args, **kwars):\n pass\n return vprint",
"def showconnecttoapiserver():\n print('\\n{0}'.format(middleware.preference.apiServerType))",
"def do_showlocals(self, line):\n if(Rsp.state != STOPPED):\n self.output = \"Command only possible during STOPPED-state.\"\n return\n curfunc = get_func(Rsp.pc) \n self.output = \"Funktion:%s\"%curfunc\n stackmap = funcmap[curfunc].stacklocals\n regmap = funcmap[curfunc].reglocals\n for var in stackmap:\n self.output += \"%s:%s\\n\"%(var, typemap[stackmap[var].type].name) \n for var in regmap:\n self.output += \"%s:%s\\n\"%(var, typemap[regmap[var].type].name)",
"def standalone_arrow(m) -> str:\n return m.standalone_arrow",
"def output_debug_info(self):",
"def trace(msg, minLevel=1):\n global verbose\n if verbose >= minLevel:\n tracePrint(msg)",
"def show_trailer(self):",
"def print_debug(obj):\n\n if not DEBUG:\n return False\n\n if hasattr(obj, 'lower'):\n # string/unicode... just print it.\n print('Debug: {}'.format(obj))\n elif isinstance(obj, (list, tuple)):\n # list/tuple, format it...\n header = 'Debug: '\n spaces = (' ' * len(header))\n if obj:\n print('{}{}'.format(header, obj[0]))\n if len(obj) > 1:\n otherlines = '\\n{}'.format(spaces).join(obj[1:])\n print('\\n{}{}'.format(spaces, otherlines))\n else:\n # different type of object\n print('Debug: {!r}'.format(obj))\n\n return True",
"def print_trace(view_func):\r\n @wraps(view_func, assigned=available_attrs(view_func))\r\n def _wrapped_view_func(request, *args, **kwargs):\r\n try:\r\n return view_func(request, *args, **kwargs)\r\n except:\r\n import traceback\r\n traceback.print_exc()\r\n return _wrapped_view_func",
"def test_debug_prefix_ok_with_not_whitelisted_keyword():\n tree = parse(dedent(\"\"\"\\\n import logging\n\n logging.info(\n \"Hello {debug_hello}!\",\n extra=dict(\n debug_hello=\"{}\",\n ),\n )\n \"\"\"))\n whitelist = Whitelist(group=\"logging.extra.example\")\n visitor = LoggingVisitor(whitelist=whitelist)\n visitor.visit(tree)\n\n assert_that(whitelist, contains(\"world\"))\n assert_that(visitor.violations, is_(empty()))",
"def d_print(msg):\n if (DEBUG == 1):\n print(msg)",
"def __debug(msg):\n\n pass",
"def _vprint(self, string):\n if self.verbose:\n print(string)",
"def debug_print(self):\n print self.title\n print self.storyline\n print self.poster_image_url\n print self.trailer_youtube_url\n print \"------\"",
"def _print(self, *args, **kwargs) -> None:\n # Only print in verbose mode\n if self._verbose:\n arglist = list(args)\n arglist[0] = f\"[buddy-{self._experiment_name}] {args[0]}\"\n print(*arglist, **kwargs)",
"def trace(filler):\n @decorator\n def dec(func):\n def wrapper(*args, **kwargs):\n indent = filler * wrapper.level\n arguments = ', '.join(str(x) for x in args)\n print('{} --> {}({})'.format(indent, func.__name__, arguments))\n wrapper.level += 1\n\n result = func(*args, **kwargs)\n print('{} <-- {}({}) == {}'.format(indent, func.__name__, arguments, result))\n wrapper.level -= 1\n return result\n wrapper.level = 0\n return wrapper\n return dec",
"def test_pydotprint_long_name():\r\n\r\n # Skip test if pydot is not available.\r\n if not theano.printing.pydot_imported:\r\n raise SkipTest('pydot not available')\r\n\r\n x = tensor.dvector()\r\n mode = theano.compile.mode.get_default_mode().excluding(\"fusion\")\r\n f = theano.function([x], [x * 2, x + x], mode=mode)\r\n f([1, 2, 3, 4])\r\n\r\n s = StringIO()\r\n new_handler = logging.StreamHandler(s)\r\n new_handler.setLevel(logging.DEBUG)\r\n orig_handler = theano.logging_default_handler\r\n\r\n theano.printing.pydotprint(f, max_label_size=5,\r\n print_output_file=False,\r\n assert_nb_all_strings=6)",
"def pprint(obj):\n for argname in sorted([x for x in dir(obj) if not x.startswith('__')]):\n # Skip callables\n if hasattr(getattr(obj, argname), '__call__'):\n continue\n print(\"{} : {}\".format(argname, getattr(obj, argname)))",
"def debug_print(*a):\n if enable_debug_output:\n print(' '.join(map(str, a)))",
"def _verbose(self,text):\n if self.verbose:\n print(text)",
"def f_Dumpfname(func):\n @wraps(func)\n def echo_func(*func_args, **func_kwargs):\n if DEBUG: print('func \\033[1;31m {}()\\033[0m called by \\033[1;31m{}() \\033[0m'.format(func.__name__,sys._getframe(1).f_code.co_name))\n return func(*func_args, **func_kwargs)\n return echo_func",
"def output_debug(text):\n if conf.debug:\n output_message('[DEBUG] ' + text)",
"def print_verbose(self) -> None:\n print(self)\n if self.meta is not None:\n print(self.meta.__repr__())",
"def _debug_print(message):\n\n if _debug == True:\n print(message)",
"def debug(func):\n\n @functools.wraps(func)\n def decorated(*args, **kwargs):\n if args and not kwargs:\n print(\"~ input of {}: args: {}\".format(func.__name__, args))\n elif not args and kwargs:\n print(\"~ input of {}: kwargs: {}\".format(func.__name__, kwargs))\n elif args and kwargs:\n print(\"~ input of {}: args: {}, kwargs: {}\".format(func.__name__, args, kwargs))\n else:\n print(\"~ input of {}: NO_ARGS\".format(func.__name__))\n output = func(*args, **kwargs) # stores the result of the function\n print(\"~ output of {}:\".format(func.__name__), output)\n return output\n\n return decorated",
"def curried_printer(*args):\n if pretty:\n pretty_log(function_name, args)\n else:\n generic_log(function_name, args)\n return function_name",
"def DEBUG_PRINT(msg, obj='', suffix=''):\n if PRINT_DEBUGS:\n print msg, obj, suffix",
"def _show(self, indent = 0):\n print(\" \"*indent, \"Name:\", self.name)\n print(\" \"*indent, \"Description:\", self.description)",
"def vprint (*args, take_action=False, **kwargs):\n\n take_action = take_action and not opts.take_action\n\n if opts.verbose or take_action:\n print (*args, **kwargs)\n\n return take_action",
"def welcome():\n return (f\"Available Routes:<br/>\"\\\n\n f\"/api/v1.0/precipitation\"\\\n\n f\"/api/v1.0/stations\"\\\n\n f\"/api/v1.0/tobs\"\\\n\n f\"/api/v1.0/start\"\\\n\n f\"/api/v1.0/start/end\")",
"def print_info(message: str):\n global verbose\n if verbose:\n print(\"%s%s%s\" % (KYEL, message, KNRM))",
"def get_debug(quiet):\n if quiet:\n def debug(*args, **kwargs):\n pass\n else:\n def debug(*args, **kwargs):\n print(*args, **kwargs)\n return debug",
"def debugPrint(text: str):\r\n if DEBUG:\r\n print(text)",
"def debug(func):\n if VERBOSE > 0:\n @functools.wraps(func)\n def wrapper_debug(*args, **kwargs):\n args_repr = [repr(a) for a in args]\n kwargs_repr = [f\"{k}={v!r}\" for k, v in kwargs.items()]\n signature = \", \".join(args_repr + kwargs_repr)\n\n print(f\"Calling {func.__name__}({signature})\\n\")\n value = func(*args, **kwargs)\n print(f\"{func.__name__!r} returned {value!r}\\n\")\n\n return value\n\n return wrapper_debug\n else:\n return func",
"def debug(version=False):\n if version:\n print(__version__)\n return\n print(json.dumps(MANAGE_DICT, indent=2))",
"def debug_decorator(func):\n\n def wrapper(*args, **kwargs):\n\n from main_loop import debug_mode\n\n if debug_mode:\n\n game_logger.logging.debug(\"Function name: \" + func.__name__)\n\n game_logger.logging.debug(\"Args: \")\n game_logger.logging.debug(args)\n\n game_logger.logging.debug(\"Kwargs: \")\n game_logger.logging.debug(kwargs)\n\n return func(*args, **kwargs)\n\n return wrapper",
"def debug():",
"def verbose(string, level, indent=None):\n if args.verbose:\n if args.verbose > level:\n if indent is None:\n if level <= LEVEL_4:\n indent = \" \" * level\n else:\n indent = \" \"\n print (indent + string)\n return",
"def debug_print(function):\n def debug(thing):\n print(function(thing))\n return thing\n return debug",
"def print_level():\n print(\"\")\n\n def show_hide_word(word):\n \"\"\"show/hide finished/unfinished words\"\"\"\n if word not in current_level.finished_words:\n return \"*\" * len(word)\n return word\n\n current_level.layout.print_layout(\n show_hide_word,\n # Print unfinished words first with '*'\n set(current_level.words) - set(current_level.finished_words),\n )\n\n # level state\n print(\"\")\n print(\"Level: %d/%d\" % (current_level_index + 1, len(all_levels)))\n if current_level.bonus_words:\n bonus_words_status = \"Bonus words: %d/%d\" % (\n len(current_level.finished_bonus_words),\n len(current_level.bonus_words)\n )\n bonus_words_status += \" %s\" % \" \".join(\n change_case(word)\n if word in current_level.finished_bonus_words\n else \"*\" * len(word)\n for word in current_level.bonus_words\n )\n print(bonus_words_status)\n\n # characters\n print(\"\")\n print(\"Chars: %s\" % \" \".join(change_case(char) for char in current_level.chars))\n print(\"\")",
"def print(self):\n self.print_avec_separateur(\" \")",
"def print_frame(self, name, frame, on_svr=False):\n\n name = \"print{}: {}\".format(self.step, name)\n\n # print using svr\n if on_svr:\n svr.debug(name, frame)\n\n # print using openCV\n else: \n self.debug_stream(name, frame)\n\n # increment step counter\n self.step += 1",
"def show(self,verbose=0):\n print 'inferenceArgs',self.ws.inferenceArgs\n print 'inferenceExpr',theano.pp(self.ws.inferenceExpr)\n if verbose>=1:\n print 'debugprint inferenceExpr:'\n theano.printing.debugprint(self.ws.inferenceExpr)\n if self.ws.dataLossExpr:\n print 'dataLossArgs',self.ws.dataLossArgs\n print 'dataLossExpr',theano.pp(self.ws.dataLossExpr)\n print 'debugprint dataLossExpr:'\n theano.printing.debugprint(self.ws.dataLossExpr)",
"def _p(self, *args, level=2, **kwargs):\n if self._verbosity >= level:\n print(*args, **kwargs)",
"def known_verbose_name():\n return 'test Verbose name'",
"def debug(self, *args, **kwargs):\n self.msg(logging.DEBUG, *args, **kwargs)",
"def print_debug(msg, category, stepsback=1):\n if debug.is_on(category):\n if config.cfg.helpful_debugging:\n # Get caller info\n fn, lineno, funcnm = inspect.stack()[stepsback][1:4]\n to_print = colour.cstring(\"DEBUG %s [%s:%d - %s(...)]:\\n\" %\n (category.upper(),\n os.path.split(fn)[-1], lineno,\n funcnm), 'debughdr')\n msg = msg.replace('\\n', '\\n ')\n to_print += colour.cstring(\" %s\" % msg, 'debug')\n else:\n to_print = colour.cstring(msg, 'debug')\n sys.stderr.write(to_print + '\\n')\n sys.stderr.flush()",
"def printdebug(self, msg):\n if self.debug > 0:\n print(msg)",
"def setup_print(self, t, message):\n if self.config['debug']:\n return tf.Print(t, (t,), message=message+\": \", summarize=10)\n return t",
"def debug(self) -> str:\n\n return Widget.debug(self)",
"def dprint(msg):\n if defaults.debug:\n print('debug: %s' % msg)",
"def _print_matrix_info(mtrx, name):\r\n pr = lambda t: print(\"ht3_solver:\\t\" + t)\r\n pr(\"MATRIX INFO:\")\r\n pr(\"Matrix:\\t\" + name)\r\n pr(\"Description:\\t\" + str(mtrx.description))\r\n pr(\"Shape:\\t\" + str(mtrx.shape))",
"def _pretty_print_token(self, token):\n INLINE = 0\n BOL = 1\n extended_print = ('ID', 'INT', 'FLOAT', 'STRING')\n next_line_tokens = ('NEWLINE', 'INDENT', 'DEDENT')\n\n if self.printer_state == BOL:\n self.printer_state = INLINE\n\n print(str(token.lineno) + self.level * \" \", end=' ')\n\n if token is None:\n pass\n elif token.type in next_line_tokens:\n if token.type == \"INDENT\":\n self.level += 1\n elif token.type == \"DEDENT\":\n self.level -= 1\n\n print(token.type + '\\n', end=' ')\n self.printer_state = BOL\n elif token.type in extended_print:\n print('(' + token.type + ', ' + str(token.value) + ')', end=' ')\n else:\n print(token.type, end=' ')",
"def lambdarepr(expr, **settings):\n return LambdaPrinter(settings).doprint(expr)",
"def symbol_for_inspector_line(self, lineno):\n def indent(line):\n n = 0\n for char in line:\n if char == ' ':\n n += 1\n else:\n break\n return n / 2\n\n lines = self._vim.current.buffer[:lineno]\n i = indent(lines[-1])\n fqn = [lines[-1].split()[-1]]\n\n for line in reversed(lines):\n if indent(line) == i - 1:\n i -= 1\n fqn.insert(0, line.split()[-1])\n\n return \".\".join(fqn)",
"def print_trail(word):\n if len(word) == 0:\n return\n print(word, end = ' ')\n t = is_reducible(word, word_dict)\n print_trail(t[0])",
"def tracing_name(name: Optional[str] = None) -> str:\n if name is None:\n name = settings.SERVICE_NAME\n return f\"{name}.{settings.ENVIRONMENT.lower()}\"",
"def debug_print(*items):\n\t\n\tif items and app.config['DEBUG']:\n\t\tprint ' '.join([str(item) for item in items])",
"def debug(cls, message):\n if cls.verbose:\n print('[DEBUG] {0}'.format(message))",
"def debug(message: str) -> None:\n if is_verbose():\n print(message)\n else:\n sys.stdout.write(\".\")\n sys.stdout.flush()",
"def debug_print(self, *content):\n if self.debug:\n print(*content)",
"def print_intro(self):\n \n print('Did you know mammals tend to have the shortest migration routes because walking takes more energy than flying or swimming?')",
"def _debug():\n return _DEBUG"
] | [
"0.85265154",
"0.59112906",
"0.5598949",
"0.5553952",
"0.5419853",
"0.52704614",
"0.52555555",
"0.5231697",
"0.5227046",
"0.5205608",
"0.5202568",
"0.5120578",
"0.51193374",
"0.5041665",
"0.5041665",
"0.50310254",
"0.50283116",
"0.50258374",
"0.5003555",
"0.49982905",
"0.49935257",
"0.49706155",
"0.49673685",
"0.49571374",
"0.49556172",
"0.49550134",
"0.49333254",
"0.49314147",
"0.49069685",
"0.4895962",
"0.48937652",
"0.48759428",
"0.48674655",
"0.4861467",
"0.4843046",
"0.4841487",
"0.48389322",
"0.48342535",
"0.48296154",
"0.48075613",
"0.48017713",
"0.47950473",
"0.47900966",
"0.47838694",
"0.47795582",
"0.47659546",
"0.47583258",
"0.47508883",
"0.47455722",
"0.47420806",
"0.47414967",
"0.47386262",
"0.4738049",
"0.47251",
"0.4723698",
"0.47184104",
"0.47152716",
"0.4713069",
"0.47123325",
"0.47101",
"0.47094202",
"0.47076893",
"0.4704203",
"0.46978554",
"0.46787876",
"0.46772522",
"0.4670916",
"0.46701455",
"0.46633828",
"0.46550903",
"0.465437",
"0.46497837",
"0.46411026",
"0.46391645",
"0.46326682",
"0.4631635",
"0.46232212",
"0.4621358",
"0.46162066",
"0.4612876",
"0.4605308",
"0.4602989",
"0.4600029",
"0.45972776",
"0.45915148",
"0.45895094",
"0.4587773",
"0.4583509",
"0.45793432",
"0.45769274",
"0.45765924",
"0.45711797",
"0.45679194",
"0.45648775",
"0.45642796",
"0.45625386",
"0.45565686",
"0.4548214",
"0.45480245",
"0.45335466"
] | 0.8331396 | 1 |
This function is not very useful; it is best avoided, since it may be removed at any time before 1.0.0. | def mw_from_cm(name, cm_constructor, ks=None, ctx_args={}, **kwargs):
def new_middleware(handler):
def new_handler(ctx):
_print_inwards(name)
ctx_kwargs = {}
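# resolve each configured kwarg from a key path inside ctx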
for k, ks_ in ctx_args.items():
ctx_kwargs[k] = get_in(ctx, ks_)
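# keep the context manager open for the duration of the wrapped handler,
# optionally exposing the managed value inside ctx under the key path ks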
with cm_constructor(**ctx_kwargs, **kwargs) as v:
if ks:
ctx = assoc_in(ctx, ks, v)
new_ctx = handler(ctx)
_print_outwards(name)
return new_ctx
return new_handler
return new_middleware | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_4_4_1_1(self):\n pass",
"def regular(self):",
"def __call__(self) -> None:",
"def exo2():",
"def degibber(self):",
"def exercise_b2_53():\r\n pass",
"def exercise_b2_106():\r\n pass",
"def _prepare(self):",
"def _prepare(self):",
"def nulltest():",
"def support(self):",
"def fn():",
"def exercise_b2_107():\r\n pass",
"def test_create_unexpected_problem(self):\n pass",
"def __upgrade(self):",
"def cx():",
"def __call__():",
"def __call__():",
"def __call__():",
"def __call__():",
"def __call__():",
"def exercise_b2_113():\r\n pass",
"def preprocess(self):",
"def substantiate():",
"def test_replace_identity(self):\n pass",
"def func():",
"def util():\n pass",
"def util():\n pass",
"def base():",
"def apply(self) -> None:",
"def apply(self) -> None:",
"def common(self):",
"def _regr_basic():",
"def transform(self):",
"def exercise_b2_69():\r\n pass",
"def exercise_b2_52():\r\n pass",
"def test_fix_code_typical_code():\r\n\r\n pass",
"def _to_be_wrapped(self) -> None:",
"def test_uparforvarg(self):",
"def exercise_b2_26():\r\n pass",
"def use(self):",
"def exercise_b2_27():\r\n pass",
"def _hook(self):",
"def test_get_parts(self):\n pass",
"def horde_cleanup(self):",
"def _origin(self):\n return 1",
"def exercise_b2_98():\r\n pass",
"def check():",
"def non_pythranizable(arg):\n return arg",
"def exercise_b2_82():\r\n pass",
"def exercise_b2_56():\r\n pass",
"def implement(self):\n\t#@DEBUG remove comments",
"def check(self) -> None:",
"def revise():",
"def override(self):\n return None",
"def __init__():",
"def test_patch_none():",
"def firstFunction(self):",
"def dummy_fn(self):\n\t\tpass",
"def __int__(self):\n pass",
"def _fix_up(self, cls, code_name):",
"def __call__(self):\n\t\treturn",
"def sth():",
"def exercise_b2_39():\r\n pass",
"def __call__(object):",
"def dummy(self):\n pass",
"def g():",
"def exercise_b2_43():\r\n pass",
"def test_trailing_data(self):",
"def problem_298():\n pass",
"def preprocess(self):\n raise RuntimeError(\"please implement this function!\")",
"def test_convert():",
"def test_get_part(self):\n pass",
"def swint(self) -> None:",
"def exercise_b2_70():\r\n pass",
"def test_solareclipses_get(self):\n pass",
"def apply(self):",
"def _build_impl(self):",
"def getfilesystemencodeerrors(): # real signature unknown; restored from __doc__\n return \"\"",
"def test_probabilistic_parsers():",
"def check_stability(self):",
"def map():",
"def unusedFromKDOTDataPreparation():",
"def testBeliefs1sk(self):",
"def question_4():\n return None",
"def MINET(self):",
"def _init(self):",
"def falcon():",
"def test_unicode_insert_error():\n# In addition, we should use vagrant or azure deployments of the scanner to Ubuntu and Windows virtual machines\n# to ensure cross-platform behavior.\n pass",
"def mezclar_bolsa(self):",
"def exercise_b2_93():\r\n pass",
"def think(s):",
"def test_get_list8(self):\n pass",
"def pick_up(self):",
"def _get_version(self):",
"def simple():",
"def simple():",
"def question_8():\n return None",
"def __len__(self) -> int:",
"def __len__(self) -> int:",
"def part2(_input):\n\n return None"
] | [
"0.54195523",
"0.53025854",
"0.5283479",
"0.52821845",
"0.5257055",
"0.5218573",
"0.51446503",
"0.5124683",
"0.5124683",
"0.51094174",
"0.5101899",
"0.5096671",
"0.5088057",
"0.50846183",
"0.50812197",
"0.507752",
"0.507305",
"0.507305",
"0.507305",
"0.507305",
"0.507305",
"0.5067609",
"0.502319",
"0.49678788",
"0.49611017",
"0.49338377",
"0.4920126",
"0.4920126",
"0.4901313",
"0.48867932",
"0.48867932",
"0.48770574",
"0.48666346",
"0.48327488",
"0.48309305",
"0.48222393",
"0.480983",
"0.4801457",
"0.47588083",
"0.47538963",
"0.475338",
"0.4753056",
"0.47467372",
"0.47378466",
"0.47278047",
"0.47208077",
"0.4715078",
"0.4712381",
"0.47037444",
"0.4702195",
"0.46881866",
"0.46856645",
"0.46841463",
"0.46808147",
"0.46802998",
"0.46783173",
"0.46781474",
"0.46746233",
"0.46711242",
"0.4661016",
"0.4651771",
"0.46475595",
"0.46457294",
"0.4636056",
"0.4628789",
"0.46282852",
"0.46195653",
"0.4607858",
"0.46028915",
"0.4594017",
"0.45937002",
"0.4587819",
"0.4587205",
"0.45829156",
"0.4578329",
"0.45746806",
"0.45682248",
"0.45678234",
"0.45618838",
"0.4558298",
"0.45582956",
"0.45576367",
"0.45452058",
"0.45451772",
"0.4542856",
"0.45388716",
"0.45352268",
"0.4532205",
"0.4530624",
"0.45281318",
"0.45209923",
"0.45195493",
"0.45126343",
"0.45120186",
"0.4509754",
"0.45007288",
"0.45007288",
"0.44869584",
"0.44848436",
"0.44848436",
"0.4479803"
] | 0.0 | -1 |
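A minimal usage sketch for the `mw_from_cm` row above, assuming the module's helpers (`get_in`, `assoc_in`, `_print_inwards`, `_print_outwards`) are importable and that `assoc_in` returns an updated copy of the context; the key paths, file name, and handler are made up for illustration:

def handler(ctx):
    ctx["lines"] = ctx["resources"]["log"].readlines()
    return ctx

log_mw = mw_from_cm(
    "log-file",                                  # name shown in verbose runs
    open,                                        # any context-manager constructor
    ks=["resources", "log"],                     # where to store the managed value in ctx
    ctx_args={"file": ["config", "log_path"]},   # kwarg name -> key path in ctx
    mode="r",                                    # extra kwargs go straight to open()
)

wrapped = log_mw(handler)
final_ctx = wrapped({"config": {"log_path": "app.log"}, "resources": {}})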
This function decorates generators that contain exactly two `yield` statements and turns them into middleware. For examples, see this module's documentation and tests. Any extra arguments beyond `name` are passed to the decorated generator when it is instantiated. If those arguments are not yet known when this module is interpreted, the function can be applied as a regular callable instead of as a decorator. | def middleware(name, *args, **kwargs):
def new_annotate(g_fn):
def new_middleware(handler):
def new_handler(ctx):
_print_inwards(name)
g = g_fn(ctx, *args, **kwargs)
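# advance to the first yield: the generator's "before" section runs and
# yields the (possibly modified) context for the wrapped handler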
changed_ctx = next(g)
new_ctx = handler(changed_ctx)
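# resume at the first yield with the handler's result; the "after" section
# runs and the second yield produces the final context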
last_ctx = g.send(new_ctx)
_print_outwards(name)
return last_ctx
return new_handler
return new_middleware
return new_annotate | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def consumer(func):\n\n from functools import wraps\n\n @wraps(func)\n def wrapper(*args,**kw):\n gen = func(*args, **kw)\n gen.next()\n return gen\n return wrapper",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def writer_wrapper_2(coroutine):\n yield from coroutine",
"def with_outer(*args):\n def generator():\n for i in args:\n yield i\n return generator",
"def construct_result_generator_middleware(result_generators):\n def result_generator_middleware(make_request, web3):\n def middleware(method, params):\n if method in result_generators:\n result = result_generators[method](method, params)\n return {'result': result}\n else:\n return make_request(method, params)\n return middleware\n return result_generator_middleware",
"def test_decorated(*args):\n for i in args:\n yield i",
"def get_generator(generator: Generator, **kwargs) -> Generator:\n return generator(**kwargs)",
"def writer_wrapper_3(coroutine):\n coroutine.send(None) # prime the coro\n while True:\n try:\n try:\n x = (yield)\n except SpamException as e: # This catches the SpamException\n coroutine.throw(e)\n else:\n coroutine.send(x)\n except StopIteration:\n pass",
"def _wrap_in_generator(func, source, namer, overload):\n\n nonlocals = []\n\n for var in six.get_function_code(func).co_freevars:\n # We must generate dummy vars so the generated function has the same closure\n # as the original function.\n free_template = 'var = None'\n nonlocal_node = templates.replace(free_template, var=var)\n nonlocals.extend(nonlocal_node)\n\n gen_fun_name = namer.new_symbol('gen_fun', set())\n template = \"\"\"\n def gen_fun(overload):\n nonlocals\n\n program\n\n return f_name\n \"\"\"\n\n ret = templates.replace(\n template,\n gen_fun=gen_fun_name,\n nonlocals=nonlocals,\n overload=overload.symbol_name,\n program=source,\n f_name=func.__name__)\n\n converted_module, _ = parsing.ast_to_object(ret)\n outer_func = getattr(converted_module, gen_fun_name)\n return outer_func(overload.module)",
"def simple():\n yield 1\n yield 2\n yield 3",
"def index(*args, **kwargs):\n yield from _generate(*args, **kwargs)",
"def coroutine(func):\n @wraps(func)\n def primer(*args, **kwargs):\n gen = func(*args, **kwargs)\n next(gen)\n return gen\n return primer",
"def simple_generator():\n yield 'horse'\n # just going to do it...\n yield 'cow'\n yield 'mouse'",
"def func_star(a_b):\n return insideroutine(*a_b)",
"def generator(func):\n\n @fn\n @wraps(func)\n def gen(*args, **kwargs):\n return Iter(func(*args, **kwargs))\n\n return gen",
"def _wrap_generator(self, func):\n @functools.wraps(func)\n def generator_context(*args, **kwargs):\n gen = func(*args, **kwargs)\n\n # Generators are suspended and unsuspended at `yield`, hence we\n # make sure the grad mode is properly set every time the execution\n # flow returns into the wrapped generator and restored when it\n # returns through our `yield` to our caller (see PR #49017).\n cls = type(self)\n try:\n # Issuing `None` to a generator fires it up\n with cls():\n response = gen.send(None)\n\n while True:\n try:\n # Forward the response to our caller and get its next request\n request = yield response\n\n except GeneratorExit:\n # Inform the still active generator about its imminent closure\n with cls():\n gen.close()\n raise\n\n except BaseException:\n # Propagate the exception thrown at us by the caller\n with cls():\n response = gen.throw(*sys.exc_info())\n\n else:\n # Pass the last request to the generator and get its response\n with cls():\n response = gen.send(request)\n\n # We let the exceptions raised above by the generator's `.throw` or\n # `.send` methods bubble up to our caller, except for StopIteration\n except StopIteration as e:\n # The generator informed us that it is done: take whatever its\n # returned value (if any) was and indicate that we're done too\n # by returning it (see docs for python's return-statement).\n return e.value\n\n return generator_context",
"def _wrap_generator(ctx_factory, func):\n @functools.wraps(func)\n def generator_context(*args, **kwargs):\n gen = func(*args, **kwargs)\n\n # Generators are suspended and unsuspended at `yield`, hence we\n # make sure the grad mode is properly set every time the execution\n # flow returns into the wrapped generator and restored when it\n # returns through our `yield` to our caller (see PR #49017).\n try:\n # Issuing `None` to a generator fires it up\n with ctx_factory():\n response = gen.send(None)\n\n while True:\n try:\n # Forward the response to our caller and get its next request\n request = yield response\n\n except GeneratorExit:\n # Inform the still active generator about its imminent closure\n with ctx_factory():\n gen.close()\n raise\n\n except BaseException:\n # Propagate the exception thrown at us by the caller\n with ctx_factory():\n response = gen.throw(*sys.exc_info())\n\n else:\n # Pass the last request to the generator and get its response\n with ctx_factory():\n response = gen.send(request)\n\n # We let the exceptions raised above by the generator's `.throw` or\n # `.send` methods bubble up to our caller, except for StopIteration\n except StopIteration as e:\n # The generator informed us that it is done: take whatever its\n # returned value (if any) was and indicate that we're done too\n # by returning it (see docs for python's return-statement).\n return e.value\n\n return generator_context",
"def test_nested_yield():\n yield (yield (yield 1))",
"def patched_generator(self, *args, **kwargs):\n self.validate(*args, **kwargs)\n yield from self.function(*args, **kwargs)",
"def test_func_generator_name():\n for i in range(0, 4):\n yield 'try_odd', i",
"def new_generator(self):\n return self.generator_function(*self.args, **self.kwargs)",
"def multiple_decorator(x):\n return x",
"def construct_error_generator_middleware(error_generators):\n def error_generator_middleware(make_request, web3):\n def middleware(method, params):\n if method in error_generators:\n error_msg = error_generators[method](method, params)\n return {'error': error_msg}\n else:\n return make_request(method, params)\n return middleware\n return error_generator_middleware",
"def test_generator_method_name(self):\n for i in range(0, 4):\n yield 'try_odd', i",
"def req_as_decorator(req_output, *args, **kwargs):\r\n return req_output(dummy_func)(*args, **kwargs)",
"def counter_wrapper_2(generator):\n yield from generator",
"def abc():\r\n yield \"a\"\r\n yield \"b\"\r\n yield \"c\"",
"def _context(name, func):\n\tpush_aspect(name, func)\n\tyield\n\tpop_aspect(name)",
"def wrap_generator(generator, wrapper_function):\n for item in generator:\n yield wrapper_function(item)",
"def testExplicitGeneratorConvenienceFunctionUsage(self):\n\t\tc = Controller()\n\t\tx = c.mock()\n\t\tc.generator(x.g(8, 9), [10, 11])\n\t\tc.replay()\n\t\tself.failUnless([k for k in x.g(8, 9)] == [10, 11])",
"def generator_container_wrapper(*args, **kwargs):\n return GeneratorContainer(generator_function, *args, **kwargs)",
"def very_simple():\n yield 1",
"def yield_from(expression):\n pass # Calls to this function are replaced with the _yield_from template below.",
"async def writing(*names: Strings) -> AsyncGenerator:\n invocation = Invocation.current\n assert not invocation.has_locks\n old_required_locks = invocation.required_locks\n try:\n invocation.required_locks = copy(old_required_locks)\n for name in each_string(*names):\n invocation.required_locks[name] = True\n yield\n finally:\n invocation._become_current() # pylint: disable=protected-access\n invocation.required_locks = old_required_locks",
"def wrap_generator(func):\n\n async def _wrapped(*a, **k):\n r, ret = None, []\n gen = func(*a, **k)\n while True:\n try:\n item = gen.send(r)\n except StopIteration:\n break\n if inspect.isawaitable(item):\n r = await item\n else:\n r = item\n ret.append(r)\n\n if len(ret) == 1:\n return ret.pop()\n return ret\n\n return _wrapped",
"def preprocess(generator):\n\n def preprocess_decorator(method):\n\n @wrapper(method)\n def preprocess_wrapper(self, *args, **kwargs):\n self.increment_pc()\n pc = tuple(self.program_counter)\n try:\n return self._pool.pop(pc), True\n except KeyError:\n key = (generator, args)\n pcs = self._needed_data.setdefault(key, [])\n pcs.append(pc)\n self.fork_pc()\n try:\n return method(self, *args, **kwargs), False\n finally:\n self.unfork_pc()\n\n return preprocess_wrapper\n return preprocess_decorator",
"def test_generator_scope():\n def inner(val):\n print(\"inner running\")\n return [0, val]\n gen = (a for a in inner(10))\n print(\"generator created\")\n return gen",
"def test_generator_inline(self):\n def test_odd(v):\n assert v % 2\n for i in range(0, 4):\n yield test_odd, i",
"def parser_for(self, name):\n\n def decorator(func):\n self.add_parser(name, func)\n return func\n\n return decorator",
"def forward_context(self) -> Generator[None, None, None]:\n yield",
"def _yielder(self, recv_generator, req_id): #{\n logger = self.logger\n\n def _send(method, args):\n _, msg_list = self._build_request(method, args, None, False, req_id=req_id)\n logger.debug('send: %r' % msg_list)\n self._send_request(msg_list)\n\n try:\n _send('YIELD_SEND', None)\n while True:\n _, obj = next(recv_generator)\n try:\n to_send = yield obj\n except Exception, e:\n logger.debug('generator.throw()')\n etype, evalue, _ = exc_info()\n _send('YIELD_THROW', [str(etype.__name__), str(evalue)])\n else:\n _send('YIELD_SEND', to_send)\n except StopIteration:\n return\n except GeneratorExit, e:\n logger.debug('generator.close()')\n _send('YIELD_CLOSE', None)\n next(recv_generator)\n raise e\n finally:\n logger.debug('_yielder exits (req_id=%s)', req_id)",
"def async_generator(func):\n @functools.wraps(func)\n def function(*args, **kwargs):\n \"Wrapped function\"\n return GeneratorFuture(func(*args, **kwargs))\n return function",
"def _populate_next(self, graph, *args, yield_response_gen=False, **kwargs):\n\n if yield_response_gen:\n kwargs['yield_response_gen'] = yield_response_gen\n format, *header_chunks, (resp, gen) = self.data_next(**kwargs)\n self._populate(graph, header_chunks)\n yield format\n yield from header_chunks\n yield resp, gen\n else:\n generator = self.data_next(**kwargs)\n format = next(generator)\n self._populate(graph, generator)",
"def generator(fn):\n def wrapped(schema):\n result = fn(schema)\n if result is not None:\n expected_result = _check_for_expected_result(fn.__name__, schema)\n return (fn.__name__, result, expected_result)\n return\n return wrapped",
"def test_doify():\n def genfun(tymth, tock=0.0, **opts):\n tyme = yield(tock)\n\n assert inspect.isgeneratorfunction(genfun)\n\n gf0 = doing.doify(genfun, name='gf0', tock=0.25)\n gf1 = doing.doify(genfun, name='gf1', tock=0.125)\n\n assert inspect.isgeneratorfunction(gf0)\n assert inspect.isgeneratorfunction(gf1)\n assert id(gf0) != id(gf1)\n\n assert gf0.__name__ == 'gf0'\n assert gf1.__name__ == 'gf1'\n assert gf0.tock == 0.25\n assert gf1.tock == 0.125\n assert gf0.done == None\n assert gf1.done == None\n assert gf0.opts == dict()\n assert gf1.opts == dict()\n\n tymist = tyming.Tymist()\n\n g0 = gf0(tymth=tymist.tymen(), tock=gf0.tock, **gf0.opts)\n assert inspect.isgenerator(g0)\n g1 = gf0(tymth=tymist.tymen(), tock=gf1.tock, **gf1.opts)\n assert inspect.isgenerator(g1)\n\n assert id(g0) != id(g1)\n \"\"\"End Test\"\"\"",
"def _iterator_codegen(resty):\n\n def codegen(context, builder, sig, args):\n [d] = args\n [td] = sig.args\n iterhelper = context.make_helper(builder, resty)\n iterhelper.parent = d\n iterhelper.state = iterhelper.state.type(None)\n return impl_ret_borrowed(\n context,\n builder,\n resty,\n iterhelper._getvalue(),\n )\n\n return codegen",
"def testExplicitGeneratorUsage(self):\n\t\tc = Controller()\n\t\tx = c.mock()\n\t\tx.g(8, 9)\n\t\tc.generator()\n\t\tc.setReturn(10)\n\t\tc.setReturn(11)\n\t\tc.replay()\n\t\tself.failUnless([k for k in x.g(8, 9)] == [10, 11])",
"def test_func_generator_transplant():\n def test_odd(v):\n assert v % 2\n for i in range(0, 4):\n yield test_odd, i",
"def genspider(ctx, name, agency_name, start_urls):\n start_urls = start_urls.split(',')\n domains = _get_domains(start_urls)\n _gen_spider(name, agency_name, domains, start_urls)\n _gen_tests(name)\n _gen_html(name, start_urls)",
"def public_async_generator(func):\n @functools.wraps(func)\n def function(*args, **kwargs):\n \"Wrapped function\"\n return GeneratorFuture(func(*args, **kwargs)).future\n return function",
"def test_stepregitry_register_func_with_multiple_decorators():\n # given\n registry = StepRegistry()\n context = {}\n registry.create_step_decorators(context)\n\n # when\n def test_step():\n ...\n\n test_step = context[\"given\"](\"pattern\")(test_step)\n test_step = context[\"when\"](\"pattern\")(test_step)\n\n # then\n assert registry.step_implementations(\"Given\") == [\n StepImpl(\"Given\", \"pattern\", test_step)\n ]\n assert registry.step_implementations(\"When\") == [\n StepImpl(\"When\", \"pattern\", test_step)\n ]",
"def test_other_exc_in_generators_yield_from(\n assert_errors,\n parse_ast_tree,\n code,\n statement,\n exception,\n default_options,\n):\n tree = parse_ast_tree(code.format(statement, exception))\n\n visitor = FunctionDefinitionVisitor(default_options, tree=tree)\n visitor.run()\n\n assert_errors(visitor, [])",
"def profile(key_name, custom_emit=None):\n def decorator(func):\n @wraps(func)\n def wrapped(*args, **kwargs):\n with ProfiledBlock(key_name, custom_emit):\n return func(*args, **kwargs)\n return wrapped\n return decorator",
"def Transform(tokenGenerator: Iterator[Token]) -> Generator['Block', Token, None]:\r\n\r\n\t\tstate = ParserState(tokenGenerator)\r\n\t\treturn state.GetGenerator()",
"def hello(self, msg, args):\n yield \"Hello, world!\"",
"def wrapper_fun(*args):\n print(\"Hello Decorator\")\n return fun(*args)",
"def get_yield(self, t, y):\n return",
"def doodad(fun: Generator):\n if inspect.isasyncgenfunction(fun):\n return asynccontextmanager(fun)\n else:\n return contextmanager(fun)",
"def runner(vendor, single_thread=False):\n def decorator(f):\n @functools.wraps(f)\n def wrapper(*args, **kwargs):\n return f(*args, **kwargs)\n\n # Store the parameters on the wrapped generator function\n wrapper.vendor = vendor\n wrapper.single_thread = single_thread\n # Store the wrapped generator function, so the annotation also functions as a registry.\n if not hasattr(runner, 'list'):\n runner.list = []\n runner.list.append(wrapper)\n return wrapper\n return decorator",
"async def test_middleware_multiple(self, dm):\n\n async def _first(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"first\"]\n await handler(ctx, responder)\n\n async def _second(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"second\"]\n await handler(ctx, responder)\n\n async def _handler(ctx, responder):\n # '_first' should have been called first, then '_second'\n assert responder.frame[\"middles\"] == [\"first\", \"second\"]\n\n dm.add_middleware(_first)\n dm.add_middleware(_second)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n ctx = create_request(\"domain\", \"middle\")\n result = await dm.apply_handler(ctx, create_responder(ctx))\n assert result.dialogue_state == \"middleware_test\"",
"def chained(func):\n def wrapper(*args, **kwargs):\n for xs in func(*args, **kwargs):\n for x in xs:\n yield x\n return wrapper",
"def test_func_generator():\n def test_odd(v):\n assert v % 2\n for i in range(0, 4):\n yield test_odd, i",
"def arguments(*args):\n def decorate(func):\n func.arguments = args\n return func\n return decorate",
"def generate(self, *args: t.Any, **kwargs: t.Any) -> t.Iterator[str]:\n if self.environment.is_async:\n import asyncio\n\n async def to_list() -> t.List[str]:\n return [x async for x in self.generate_async(*args, **kwargs)]\n\n yield from asyncio.run(to_list())\n return\n\n ctx = self.new_context(dict(*args, **kwargs))\n\n try:\n yield from self.root_render_func(ctx) # type: ignore\n except Exception:\n yield self.environment.handle_exception()",
"def chunk_generator( callback, request, c ):",
"def proxy_manager(manager, ignore_manager):\n if ignore_manager:\n yield\n else:\n with manager:\n yield",
"def test_GeneratorBuilt(self):\n generator = Mock()\n genFn = Mock(return_value=generator)\n args = range(3)\n kwargs = {'one': 1, 'two': 2, 'three': 3}\n \n wrapper = KaoGenerator(genFn, *args, **kwargs)\n genFn.assert_called_once_with(*args, **kwargs)\n self.assertEqual(wrapper.generator, generator)",
"def b2partsgenerator(stepname, idx=None):\n\n def dec(func):\n assert stepname not in b2partsgenmapping\n b2partsgenmapping[stepname] = func\n if idx is None:\n b2partsgenorder.append(stepname)\n else:\n b2partsgenorder.insert(idx, stepname)\n return func\n\n return dec",
"def semigroup_generators(self):",
"def generator_sends_collector():\n def w(collect_list):\n def collector(*a, **kw):\n item = yield\n while item is not None:\n collect_list.append(item)\n item = yield\n collect_list.append(item)\n\n return collector\n\n return w",
"def __call__(self):\n yield from self",
"def example_generator(self, mode: str):\n raise NotImplementedError",
"def function_generator(decorated_function):\n\n def extended_func(*args, **kwargs):\n \"\"\"\n The result function, that wraps decorated function.\n \"\"\"\n return number * decorated_function(*args, **kwargs)\n\n return extended_func",
"def end_of_wrappers(args, wrapper):\n if not len(args):\n return wrapper\n elif len(args) == 1 and callable(args[0]):\n return wrapper(args[0])\n raise IncorrectUseOfTheDecoratorError('You used the awaitable decorator incorrectly. Read the documentation.')",
"def wrap(*args, **kwargs):\n\n print('>> Decorate before executing \"greeting_message\" function')\n func = f(*args, **kwargs)\n print('>> Decorate after executing \"greeting_message\" function')\n return func",
"def enable_scan(name):\n def decorator(func):\n return function_mix(func, code=_enable_scan(func.__code__, name))\n return decorator",
"def yieldRPC(remoteYields): #Status: WIP\r\n pass",
"def testExplicitGeneratorExecptionUsage(self):\n\t\tc = Controller()\n\t\tx = c.mock()\n\t\tx.g(8, 9)\n\t\tc.generator()\n\t\tc.setReturn(10)\n\t\tc.setException(Exception(\"bogus\"))\n\t\tc.replay()\n\t\tg = x.g(8, 9)\n\t\tself.failUnless(g.next() == 10)\n\t\tself.failUnlessRaises(Exception, g.next)",
"def forward_context(self) -> Generator[None, None, None]:\n try:\n yield\n finally:\n pass",
"def make_sync(f):\n @wraps(f)\n def g(*args, **kwargs):\n finished = defer.Deferred()\n \n try:\n gen = f(*args, **kwargs)\n except Exception, e:\n finished.errback(e)\n return finished\n \n # If the decorator was used when it wasn't needed, it may well return a\n # deferred, which should not be passed through another deferred.\n if isinstance(gen, defer.Deferred):\n return gen\n \n # If the function did not return a generator, return straight away.\n if not (hasattr(gen, \"next\") and hasattr(gen, \"send\")):\n finished.callback(gen)\n return finished\n \n # Step forward in the generator code, using the supplied function and\n # arguments, f(*args). This should return a Deferred (yielded by the\n # generator); callbacks are added such that when this generator completes,\n # the value is passed back to the generator, using either gen.send or\n # gen.throw. If StopIteration is raised because the generator has finished,\n # complete finished with the supplied x value; the last generated value. If\n # another exception is raised by the generator, pass it to finished.\n def step(x, f, *args):\n try:\n d = f(*args)\n if isinstance(d, defer.Deferred):\n d.addCallbacks(callback, errback)\n else:\n callback(d)\n except StopIteration:\n finished.callback(x)\n except Exception, e:\n finished.errback(e)\n \n def callback(x):\n step(x, gen.send, x)\n \n def errback(e):\n step(None, gen.throw, e.type, e.value)\n \n step(None, gen.next)\n \n return finished\n\n return g",
"def _yield_once():\n\n class _CallMeNextTime:\n def __await__(self):\n # This is inside the scheduler where we know generator yield is the\n # implementation of task switching in CircuitPython. This throws\n # control back out through user code and up to the scheduler's\n # __iter__ stack which will see that we've suspended _current.\n # Don't yield in async methods; only await unless you're making a library.\n yield\n\n return _CallMeNextTime()",
"def testExplicitGeneratorConvenienceFunctionExceptionUsage(self):\n\t\tc = Controller()\n\t\tx = c.mock()\n\t\tc.generator(x.g(8, 9), [10], Exception(\"bogus\"))\n\t\tc.replay()\n\t\tg = x.g(8, 9)\n\t\tself.failUnless(g.next() == 10)\n\t\tself.failUnlessRaises(Exception, g.next)",
"def monitorGenerator(q, target, *more, **options):\n def stuff(q, *key):\n while True:\n item = (yield),\n q.put(key + item)\n return\n\n for id, reader in itertools.chain([target], more):\n res, name = stuff(q, id), \"{:s}<{!r}>\".format(options.get('name', ''), id)\n yield process.monitor_reader(six.next(res) or res.send, reader, name=name), res\n return",
"def test_stop_iteration_in_generators_yield_from(\n assert_errors,\n parse_ast_tree,\n code,\n statement,\n exception,\n default_options,\n):\n tree = parse_ast_tree(code.format(statement, exception))\n\n visitor = FunctionDefinitionVisitor(default_options, tree=tree)\n visitor.run()\n\n assert_errors(visitor, [StopIterationInsideGeneratorViolation])",
"def get_generator_yield_type(genr):\n return get_generator_type(genr).__args__[0]",
"def multi_manager(*managers):\n\n if not managers:\n source = dedent(\n \"\"\"\n def null_manager(*args, **kwargs):\n yield ()\n \"\"\"\n )\n else:\n with_stmt = \", \".join(\"manager{i}(*args, **kwargs) as result{i}\".format(i=i) for i in range(len(managers)))\n\n result_tuple = \"(\" + \", \".join(\"result{i}\".format(i=i) for i in range(len(managers))) + \")\"\n\n source = dedent(\n \"\"\"\n def multi_manager(*args, **kwargs):\n with {with_stmt}:\n yield {result_tuple}\n \"\"\"\n ).format(with_stmt=with_stmt, result_tuple=result_tuple)\n\n context = {\"manager\" + str(i): manager for i, manager in enumerate(managers)}\n\n return contextmanager(make_function(source=source, context=context))",
"def middleware(self, *args):\n for arg in args:\n if arg and arg not in self.list_middleware:\n self.list_middleware.append(arg)\n return self",
"def sequences(self):\n # i am one\n yield self\n # nothing further\n return",
"def test_nested(a, b, c):\n def one():\n return a\n def two():\n return b\n def three():\n return c\n def new_closure(a, b):\n def sum():\n return a + b\n return sum\n yield one\n yield two\n yield three\n yield new_closure(a, c)",
"def decorating(decorated, extra=None):\n # pylint: disable = R0912\n def flat_names(args):\n \"\"\" Create flat list of argument names \"\"\"\n for arg in args:\n if isinstance(arg, basestring):\n yield arg\n else:\n for arg in flat_names(arg):\n yield arg\n name = decorated.__name__\n try:\n dargspec = argspec = _inspect.getargspec(decorated)\n except TypeError:\n dargspec = argspec = ([], 'args', 'kwargs', None)\n if extra:\n keys = extra.keys()\n argspec[0].extend(keys)\n defaults = list(argspec[3] or ())\n for key in keys:\n defaults.append(extra[key])\n argspec = (argspec[0], argspec[1], argspec[2], defaults)\n\n # assign a name for the proxy function.\n # Make sure it's not already used for something else (function\n # name or argument)\n counter, proxy_name = -1, 'proxy'\n names = dict.fromkeys(flat_names(argspec[0]))\n names[name] = None\n while proxy_name in names:\n counter += 1\n proxy_name = 'proxy%s' % counter\n\n def inner(decorator):\n \"\"\" Actual decorator \"\"\"\n # Compile wrapper function\n space = {proxy_name: decorator}\n if argspec[3]:\n kwnames = argspec[0][-len(argspec[3]):]\n else:\n kwnames = None\n passed = _inspect.formatargspec(argspec[0], argspec[1], argspec[2],\n kwnames, formatvalue=lambda value: '=' + value\n )\n # pylint: disable = W0122\n exec \"def %s%s: return %s%s\" % (\n name, _inspect.formatargspec(*argspec), proxy_name, passed\n ) in space\n wrapper = space[name]\n wrapper.__dict__ = decorated.__dict__\n wrapper.__doc__ = decorated.__doc__\n if extra and decorated.__doc__ is not None:\n if not decorated.__doc__.startswith('%s(' % name):\n wrapper.__doc__ = \"%s%s\\n\\n%s\" % (\n name,\n _inspect.formatargspec(*dargspec),\n decorated.__doc__,\n )\n return wrapper\n\n return inner",
"def instructions(self):\n yield self.inst\n for inst in self.arguments[:]:\n yield inst\n for basic_block in self.basic_blocks[:]:\n if basic_block.function is not None:\n yield basic_block.inst\n for inst in basic_block.insts[:]:\n yield inst\n yield self.end_inst",
"def test_yield_in_const_conditional_true():\n if True:\n print((yield 1))",
"def test_add_middleware_decorator() -> None:\n bot = Phial(\"token\", {})\n\n @bot.middleware()\n def test(message: Message) -> None:\n pass\n\n assert len(bot.middleware_functions) == 1\n assert bot.middleware_functions[0] is test",
"def double_rapper(func):\n @wraps(func)\n def rapper(*args, **kwargs):\n print('I am going to run {}'.format(func.__name__))\n func(*args, **kwargs)\n print('{} finished'.format(func.__name__))\n return rapper",
"def __call__(self, gen) -> Generator:\n self.stream = gen\n self.prepare()\n return self.stream",
"def yieldroutes(func):\r\n import inspect # Expensive module. Only import if necessary.\r\n path = '/' + func.__name__.replace('__','/').lstrip('/')\r\n spec = inspect.getargspec(func)\r\n argc = len(spec[0]) - len(spec[3] or [])\r\n path += ('/:%s' * argc) % tuple(spec[0][:argc])\r\n yield path\r\n for arg in spec[0][argc:]:\r\n path += '/:%s' % arg\r\n yield path",
"def test_stepregistry_should_create_one_step_decorator_per_keyword():\n # given\n registry = StepRegistry()\n context = {}\n\n # when\n registry.create_step_decorators(context)\n\n # then\n assert len(context) == 4\n assert \"given\" in context\n assert \"when\" in context\n assert \"then\" in context\n assert \"step\" in context",
"async def context(wrapped: AsyncGenerator) -> AsyncGenerator:\n invocation = Invocation.current\n async with wrapped: # type: ignore\n invocation._become_current() # pylint: disable=protected-access\n yield ()\n invocation._become_current() # pylint: disable=protected-access",
"def composition(second, first, multiple_returns=True):\n if multiple_returns:\n\n def composed(*args, **kwargs):\n return second(*first(*args, **kwargs))\n\n else:\n\n def composed(*args, **kwargs):\n return second(first(*args, **kwargs))\n\n # add the docstring\n composed.__doc__ = \"\"\"Apply `{first_name}`, and then `{second_name}`, returning \"\"\"\n \"\"\"the result.\n\n Docstring of `{first_name}`:\n\n {first_doc}\n\n Docstring of `{second_name}`:\n\n {second_doc}\n \"\"\".format(\n first_name=first.__name__,\n second_name=second.__name__,\n first_doc=first.__doc__,\n second_doc=second.__doc__,\n )\n # make the __name__ attribute a concatenation of the two functions' names\n composed.__name__ = \"{}_then_{}\".format(first.__name__, second.__name__)\n return composed",
"def input(self, *args, **kwargs):\n if args:\n for arg in args:\n yield None, arg\n\n else:\n # if we don't have any arguments we want .output() to do one iteration\n yield None, None"
] | [
"0.61025184",
"0.6074266",
"0.60481",
"0.59272635",
"0.587607",
"0.57251585",
"0.5700559",
"0.565485",
"0.5628643",
"0.5623533",
"0.55874825",
"0.5568008",
"0.5555406",
"0.5531668",
"0.55288386",
"0.55071145",
"0.55039895",
"0.54155296",
"0.53743196",
"0.5331949",
"0.5328424",
"0.5326343",
"0.53241307",
"0.5315651",
"0.5297026",
"0.5269264",
"0.5256659",
"0.52475005",
"0.5220585",
"0.52136374",
"0.5191426",
"0.51416886",
"0.51294357",
"0.51010764",
"0.50970024",
"0.5081823",
"0.5078866",
"0.5078798",
"0.504145",
"0.5041336",
"0.5033468",
"0.50161994",
"0.50042444",
"0.49784487",
"0.4977565",
"0.49572614",
"0.4936434",
"0.49249145",
"0.4916914",
"0.4906839",
"0.49028295",
"0.49009523",
"0.48868078",
"0.4886646",
"0.48853558",
"0.48833692",
"0.4880411",
"0.48767206",
"0.48745966",
"0.48626703",
"0.48564577",
"0.48548338",
"0.48395094",
"0.48360547",
"0.48315609",
"0.48291785",
"0.48256478",
"0.48015538",
"0.47918072",
"0.47860876",
"0.47823095",
"0.47786474",
"0.4769507",
"0.47654122",
"0.47643828",
"0.47594985",
"0.47563237",
"0.47518232",
"0.47422892",
"0.4741873",
"0.47361115",
"0.47239515",
"0.4711467",
"0.47054145",
"0.4702269",
"0.46979457",
"0.46948984",
"0.46947068",
"0.4693774",
"0.46929058",
"0.4690318",
"0.46882147",
"0.4676306",
"0.46706545",
"0.4668734",
"0.4658077",
"0.46525788",
"0.4642023",
"0.4627353",
"0.46263248"
] | 0.68625414 | 0 |
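A minimal sketch of the two-yield protocol that the `middleware` row above expects; the name, keys, and timing logic are made up for illustration:

import time

@middleware("timer")
def timing_mw(ctx):
    # "before" section: runs up to the first yield
    start = time.monotonic()
    new_ctx = yield ctx                 # first yield hands ctx to the wrapped handler
    # "after" section: runs once the handler's result is sent back in
    new_ctx["elapsed"] = time.monotonic() - start
    yield new_ctx                       # second yield produces the final ctx

def handler(ctx):
    ctx["result"] = sum(range(1000))
    return ctx

print(timing_mw(handler)({}))           # {'result': 499500, 'elapsed': ...}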
This function decorates middleware functions so that their before and after sections show up during a verbose run. For examples, see this module's documentation and tests. | def named(name):
def new_annotate(mware):
def new_middleware(handler):
new_handler = mware(handler)
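# wrap the handler with the underlying middleware, then log entry/exit around it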
def verbose_handler(ctx):
_print_inwards(name)
new_ctx = new_handler(ctx)
_print_outwards(name)
return new_ctx
return verbose_handler
return new_middleware
return new_annotate | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def middleware(name, *args, **kwargs):\n\n def new_annotate(g_fn):\n def new_middleware(handler):\n def new_handler(ctx):\n _print_inwards(name)\n\n g = g_fn(ctx, *args, **kwargs)\n\n changed_ctx = next(g)\n new_ctx = handler(changed_ctx)\n last_ctx = g.send(new_ctx)\n\n _print_outwards(name)\n\n return last_ctx\n\n return new_handler\n\n return new_middleware\n\n return new_annotate",
"def middleware_after(self):\n pass",
"def wrap_and_call(ctx, handler, *middleware, verbose=False):\n global _VERBOSE_MODE\n _VERBOSE_MODE = verbose\n\n middleware_ = list(middleware)\n\n return compose(*reversed(middleware_))(handler)(ctx)",
"def test_add_middleware_decorator() -> None:\n bot = Phial(\"token\", {})\n\n @bot.middleware()\n def test(message: Message) -> None:\n pass\n\n assert len(bot.middleware_functions) == 1\n assert bot.middleware_functions[0] is test",
"def wrap_in_middleware(app, global_conf, application_stack, **local_conf):\n stack = application_stack\n # Merge the global and local configurations\n conf = global_conf.copy()\n conf.update(local_conf)\n debug = asbool(conf.get('debug', False))\n # First put into place httpexceptions, which must be most closely\n # wrapped around the application (it can interact poorly with\n # other middleware):\n app = wrap_if_allowed(app, stack, httpexceptions.make_middleware, name='paste.httpexceptions', args=(conf,))\n # The recursive middleware allows for including requests in other\n # requests or forwarding of requests, all on the server side.\n if asbool(conf.get('use_recursive', True)):\n from paste import recursive\n app = wrap_if_allowed(app, stack, recursive.RecursiveMiddleware, args=(conf,))\n # Various debug middleware that can only be turned on if the debug\n # flag is set, either because they are insecure or greatly hurt\n # performance\n if debug:\n # Middleware to check for WSGI compliance\n if asbool(conf.get('use_lint', True)):\n from paste import lint\n app = wrap_if_allowed(app, stack, lint.make_middleware, name='paste.lint', args=(conf,))\n # Middleware to run the python profiler on each request\n if asbool(conf.get('use_profile', False)):\n import profile\n app = wrap_if_allowed(app, stack, profile.ProfileMiddleware, args=(conf,))\n # Middleware that intercepts print statements and shows them on the\n # returned page\n if asbool(conf.get('use_printdebug', True)):\n from paste.debug import prints\n app = wrap_if_allowed(app, stack, prints.PrintDebugMiddleware, args=(conf,))\n if debug and asbool(conf.get('use_interactive', False)):\n # Interactive exception debugging, scary dangerous if publicly\n # accessible, if not enabled we'll use the regular error printing\n # middleware.\n try:\n from weberror import evalexception\n app = wrap_if_allowed_or_fail(app, stack, evalexception.EvalException,\n args=(conf,),\n kwargs=dict(templating_formatters=build_template_error_formatters()))\n except MiddlewareWrapUnsupported as exc:\n log.warning(str(exc))\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n else:\n # Not in interactive debug mode, just use the regular error middleware\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n # Transaction logging (apache access.log style)\n if asbool(conf.get('use_translogger', True)):\n from paste.translogger import TransLogger\n app = wrap_if_allowed(app, stack, TransLogger)\n # X-Forwarded-Host handling\n from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware\n app = wrap_if_allowed(app, stack, XForwardedHostMiddleware)\n return app",
"def decorate(func):\n from aha.dispatch.router import get_router\n r = get_router()\n r.connect(None, path, controller = func, **params)\n return func",
"def moo(func):\n def decorated(*args, **kwargs):\n print 'moo'\n return func(*args, **kwargs) # Run decorated function.\n return decorated",
"def important(func):\n\n def decorated(*args, **kwargs):\n \"\"\"Decorated method.\"\"\"\n runLog.important(func(*args, **kwargs))\n\n return decorated",
"def QueryParametersMidware(event, context):\n\n def decorator_function(func):\n \"\"\"Decorator: performs query parameter checking and passes response\n\n Arguments:\n func (function): inner function that takes the response\n\n Returns:\n (function): wrapped function\n \"\"\"\n\n def wrapper(resp):\n \"\"\"Inner function: performs media type checking and passes response\n\n Arguments:\n resp (Response): Response object\n \n Returns:\n (Response): Response object, modified by query parameter midware\n \"\"\"\n\n # perform query param middleware function, which modifies the \n # response status code/headers/body as necessary\n # if status code is still OK at end of function, then execute the\n # inner function\n QueryParametersMW.middleware_func(event, resp)\n if resp.get_status_code() == SC.OK:\n return func(resp)\n else:\n return resp\n \n return wrapper\n\n return decorator_function",
"def wrapper_fun(*args):\n print(\"Hello Decorator\")\n return fun(*args)",
"def register_middleware(self, middleware, opts={}):\n self._middleware.append((middleware, opts))",
"def test_wraps():\n print('func')",
"def wrap(*args, **kwargs):\n\n print('>> Decorate before executing \"greeting_message\" function')\n func = f(*args, **kwargs)\n print('>> Decorate after executing \"greeting_message\" function')\n return func",
"def prep_logging_decorator(func):\n filename = filename_regexp.match(inspect.getmodule(inspect.stack()[1][0]).__file__).group(1)\n\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n logger.info('{}: Start preparation'.format(filename))\n func(*args, **kwargs)\n logger.info('{}: Finished preparation'.format(filename))\n return wrapper",
"def before_and_after_function_wrapper(func):\n\n @functools.wraps(func)\n def function_wrapper(*args, **kwargs):\n self_obj = args[0]\n before_func = getattr(self_obj, '_before_{}'.format(func.__name__), None)\n after_func = getattr(self_obj, '_after_{}'.format(func.__name__), None)\n\n if before_func is not None:\n before_func(*args, **kwargs)\n\n to_return = func(*args, **kwargs)\n\n if after_func is not None:\n after_func(to_return)\n\n return to_return\n\n return function_wrapper",
"def add(self, middleware):\n pass # pragma: no cover",
"def setup_stage(method):\n def decorator(self):\n name = method.func_name\n if should_run_stage(name):\n say(\"Setup.%s\" % name)\n method(self)\n set_stage(name)\n decorator.__doc__ = method.__doc__\n return decorator",
"def add_setup(setup=None, teardown=None):\n def decorate_function(test):\n def wrapper(self):\n if setup:\n setup(self)\n test(self)\n if teardown:\n teardown(self)\n return wrapper\n return decorate_function",
"def _inject_trace_middleware_to_args(trace_middleware, args, kwargs):\n # type: (Callable, Tuple, Dict) -> Tuple[Tuple, Dict]\n middlewares_arg = 8\n if _graphql_version >= (3, 2):\n # middleware is the 10th argument graphql.execute(..) version 3.2+\n middlewares_arg = 9\n\n # get middlewares from args or kwargs\n try:\n middlewares = get_argument_value(args, kwargs, middlewares_arg, \"middleware\") or []\n if isinstance(middlewares, MiddlewareManager):\n # First we must get the middlewares iterable from the MiddlewareManager then append\n # trace_middleware. For the trace_middleware to be called a new MiddlewareManager will\n # need to initialized. This is handled in graphql.execute():\n # https://github.com/graphql-python/graphql-core/blob/v3.2.1/src/graphql/execution/execute.py#L254\n middlewares = middlewares.middlewares # type: Iterable\n except ArgumentError:\n middlewares = []\n\n # Note - graphql middlewares are called in reverse order\n # add trace_middleware to the end of the list to wrap the execution of resolver and all middlewares\n middlewares = list(middlewares) + [trace_middleware]\n\n # update args and kwargs to contain trace_middleware\n args, kwargs = set_argument_value(args, kwargs, middlewares_arg, \"middleware\", middlewares)\n return args, kwargs",
"def middleware(self, *args, **kwargs):\n return super(Blueprint, self).middleware(*args, **kwargs)",
"def decorate(func, *args, **kws):\n def do_authenticate():\n \"\"\"\n A function to perform authentication\n every time decorated function is called.\n \"\"\"\n #try:\n if 1:\n if 'referer' not in self.session:\n path = urlsplit(self.request.url)[2]\n self.session['referer'] = path\n self.session.put()\n #except:\n # pass\n aobj = self.config.auth_obj()\n self.get_controller()\n auth_res = aobj.auth(self.controller, *args, **kws)\n if auth_res:\n return func(*args, **kws)\n aobj.auth_redirect(self.controller, *args, **kws)\n # clear controller for development environment.\n\n return do_authenticate",
"def enable_audit_logging(f):\n\n @wraps(f)\n def wrapper(*args, **kwargs):\n def create_audit_log_for_request_decorator(response):\n return create_audit_log_for_request(response)\n\n if is_audit_enabled():\n # we can't add the `after_this_request` and\n # `create_audit_log_for_request_decorator` decorators to the\n # functions directly, because `is_audit_enabled` depends on\n # the config being loaded\n flask.after_this_request(create_audit_log_for_request_decorator)\n return f(*args, **kwargs)\n\n return wrapper",
"def decorator(module_fn):\n _FILTERS_AND_SAMPLERS.append((filter_, module_fn))\n return module_fn",
"def some(func):\n def wrapper(* args,** kwargs):\n logging.basicConfig(filename='error.log',level=logging.DEBUG)\n logging.info(request.url + \" : \" + str(request.remote_addr)+\" using function \"+func.__name__ )\n return func(* args,** kwargs)\n\n wrapper.__name__ = func.__name__ \n return wrapper",
"def instrument_flask():\n oc_trace_config = app.config.get('OPENCENSUS_TRACE', {})\n oc_trace_config.update({\n 'EXPORTER': trace_exporter.TraceExporter,\n 'PROPAGATOR': trace_context_http_header_format.TraceContextPropagator\n })\n app.config.update(OPENCENSUS_TRACE=oc_trace_config)\n return flask_middleware.FlaskMiddleware(app)",
"def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))",
"def wsgiapp():\n def decorator(func):\n def wsgiapp_wrapper(*args):\n # we get 3 args when this is a method, two when it is\n # a function :(\n if len(args) == 3:\n environ = args[1]\n start_response = args[2]\n args = [args[0]]\n else:\n environ, start_response = args\n args = []\n def application(environ, start_response):\n form = request.parse_formvars(environ,\n include_get_vars=True)\n status = '200 OK'\n form['environ'] = environ\n try:\n res = func(*args, **form.mixed())\n except ValueError, ve:\n status = '500 Server Error'\n res = '<html>There was an error: %s</html>' % \\\n html_quote(ve)\n start_response(status, [('content-type', 'text/html')])\n return [res]\n app = simplecatcher(application)\n return app(environ, start_response)\n wsgiapp_wrapper.exposed = True\n return wsgiapp_wrapper\n return decorator",
"def test_decorator_middleware(self):\n request = self.factory.get(reverse('contact:home'))\n\n # middleware don't store request to decorated function\n decorated_func = not_record_request(home_page)\n request.user = self.user\n self.middleware.process_view(request, decorated_func)\n rs = RequestStore.objects.all()\n self.assertQuerysetEqual(rs, [])\n\n # middleware store request to undecorated function\n request.user = self.user\n self.middleware.process_view(request, home_page)\n rs = self.request_store.objects.all()\n self.assertEquals(len(rs), 1)\n only_one_rs = rs[0]\n self.assertEqual(only_one_rs.path, reverse('contact:home'))\n\n # middleware store request to undecorated function if user is anonymous\n request.user = AnonymousUser()\n self.middleware.process_view(request, home_page)\n rs = self.request_store.objects.all()\n self.assertEquals(len(rs), 2)\n only_one_rs = rs[1]\n self.assertEqual(only_one_rs.path, reverse('contact:home'))",
"def greeting_message_decorator(f):\n\n def wrap(*args, **kwargs):\n \"\"\" Here we can print *aregs and **kwargs and manipulate them \"\"\"\n\n print('>> Decorate before executing \"greeting_message\" function')\n func = f(*args, **kwargs)\n print('>> Decorate after executing \"greeting_message\" function')\n return func\n return wrap",
"def captive(f):\n\n def wrapper(self, request, *args, **kwargs):\n return captiveHandler(request) or f(self, request, *args, **kwargs)\n functools.update_wrapper(wrapper, f)\n return wrapper",
"def info(func):\n\n def decorated(*args, **kwargs):\n r\"\"\"Decorated method.\"\"\"\n runLog.info(func(*args, **kwargs))\n\n return decorated",
"def trace(func):\n @wraps(func)\n def tracer(*args, **kwargs):\n name = func.__name__\n stack_size = int(len(inspect.stack(0)) / 2) # @wraps(func) is also increasing the size\n indent = stack_size*'\\t'\n print(f'{indent} > Entering \"{name}\": args: {args}')\n result = func(*args, **kwargs)\n print(f'{indent} < Leaving \"{name}\"')\n return result\n\n return tracer",
"def decorator(func):\n\t\treturn push_aspect(name or func.__name__, func)",
"def decor(func):\n def wrap():\n print(\"@@@ STATISTICS REPORT START @@@\\n\")\n func()\n print(\"@@@ STATISTICS REPORT FINISH @@@\\n\")\n return wrap",
"def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))",
"def load_middleware(*args, **kwargs):\n inject_middleware()\n BaseHandler.load_middleware = original_load_middleware\n return original_load_middleware(*args, **kwargs)",
"def module(filter_):\n def decorator(module_fn):\n \"\"\"Decorates a module function.\"\"\"\n _FILTERS_AND_SAMPLERS.append((filter_, module_fn))\n return module_fn\n return decorator",
"def get_viewfunc(self):\n # Generate some common middlewares\n if self.user_auth is not False:\n self.middlewares.append(UserAuthMiddleware(self.user_auth))\n\n if self.client_auth is not False:\n self.middlewares.append(ClientAuthMiddleware())\n\n if self.arguments:\n self.middlewares.append(ArgumentMiddleware(self.arguments))\n\n if self.paged:\n self.middlewares.append(PagingMiddleware())\n\n # Return the viewfunc, wrapped with requested middlewares\n return generate_viewfunc(self.viewfunc, self.middlewares)",
"def debug_decorator(func):\n\n def wrapper(*args, **kwargs):\n\n from main_loop import debug_mode\n\n if debug_mode:\n\n game_logger.logging.debug(\"Function name: \" + func.__name__)\n\n game_logger.logging.debug(\"Args: \")\n game_logger.logging.debug(args)\n\n game_logger.logging.debug(\"Kwargs: \")\n game_logger.logging.debug(kwargs)\n\n return func(*args, **kwargs)\n\n return wrapper",
"def trace(filler):\n @decorator\n def dec(func):\n def wrapper(*args, **kwargs):\n indent = filler * wrapper.level\n arguments = ', '.join(str(x) for x in args)\n print('{} --> {}({})'.format(indent, func.__name__, arguments))\n wrapper.level += 1\n\n result = func(*args, **kwargs)\n print('{} <-- {}({}) == {}'.format(indent, func.__name__, arguments, result))\n wrapper.level -= 1\n return result\n wrapper.level = 0\n return wrapper\n return dec",
"def configure_before_request_funcs(app):\n @app.before_request\n def conf_set_user_cookie_id():\n return set_user_cookie_id()\n \n @app.before_request\n def check_for_maintenance():\n if config.DOWN_FOR_MAINTENANCE:\n return 'Sorry, we\\'re down momentarily for a teensey bit of maintenance!', 503\n \n @app.before_request\n def count_uniques():\n return\n statsd.set('unique_users', g.user_cookie_id)\n statsd.set('unique_ips', request.remote_addr)\n \n @app.before_request\n def set_statsd_context():\n g.statsd_context = \"%s.%s\" % (request.endpoint, request.method)\n g.total_request_timer = statsd.timer(g.statsd_context + \".response_time\")\n g.total_request_timer.start()",
"def generate_viewfunc(final_viewfunc, middlewares):\n accepted_kwargs = []\n for param in inspect.signature(final_viewfunc).parameters.values():\n if param.kind == param.POSITIONAL_ONLY:\n raise ValueError(\"%s expects positional argument %s\"\n % (final_viewfunc, param.name))\n elif param.kind == param.VAR_POSITIONAL:\n raise ValueError(\"%s expects var-positional argument %s\"\n % (final_viewfunc, param.name))\n elif param.kind == param.VAR_KEYWORD:\n raise ValueError(\"%s expects var-keyword argument %s\"\n % (final_viewfunc, param.name))\n\n accepted_kwargs.append(param.name)\n\n wants_ldap = 'ldap' in accepted_kwargs\n\n def caller():\n kwargs = {\n \"log\": APP.logger,\n }\n\n for middleware in middlewares:\n output = middleware.request_infos()\n if output:\n APP.logger.debug(\"Middleware %s generated kwargs: %s\",\n middleware,\n output)\n kwargs.update(output)\n\n result = middleware.intermediate_viewfunc()\n if result is not None:\n APP.logger.debug(\"Middleware %s returned: %s\",\n middleware,\n result)\n return result\n\n # Build the LDAP client\n if wants_ldap:\n ldap_client = LdapClient(\n APP.logger,\n kwargs.get('user_tokeninfo'),\n kwargs.get('client_info'),\n )\n kwargs['ldap'] = ldap_client\n\n APP.logger.debug(\"Got args %s for viewfunc %s pre-filter\",\n kwargs,\n final_viewfunc)\n\n kwargs = {key: kwargs[key] for key in kwargs if key in accepted_kwargs}\n\n APP.logger.debug(\"Calling final viewfunc %s with args %s\",\n final_viewfunc,\n kwargs)\n res = final_viewfunc(**kwargs)\n resp = {}\n if isinstance(res, tuple):\n res, resp = res\n resp['result'] = res\n\n headers = {\n \"Content-Security-Policy\": \"default-src: 'none'\",\n \"Feature-Policy\": \"\",\n \"X-Frame-Options\": \"DENY\",\n \"X-Xss-Protection\": \"1; mode=block\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"Referrer-Policy\": \"no-referrer\",\n }\n for middleware in middlewares:\n new_resp = middleware.manipulate_response(resp, kwargs)\n extra_headers = None\n if new_resp and isinstance(new_resp, tuple):\n new_resp, extra_headers = new_resp\n if new_resp is not None:\n APP.logger.debug(\"Middleware %s manipulated response\",\n middleware)\n resp = new_resp\n if extra_headers is not None:\n APP.logger.debug(\"Middleware %s added headers: %s\",\n middleware, extra_headers)\n headers.update(extra_headers)\n\n return resp, headers\n return caller",
"def test_uses_wraps(self):\n @self.actions(\"ctx_name\", [])\n def myview(request, some_id):\n \"\"\"docstring\"\"\"\n\n self.assertEqual(myview.func_name, \"myview\")\n self.assertEqual(myview.func_doc, \"docstring\")",
"def _debug_wrap(func):\n\n def wrapper(*args, **kwargs):\n _debug_print(f\"{datetime.datetime.now()} - About to run: {func.__name__}\")\n ret_val = func(*args, **kwargs)\n _debug_print(f\"{datetime.datetime.now()} - Completed run: {func.__name__}\")\n return ret_val\n\n return wrapper",
"def warmer_handler(f):\n\n @wraps(f)\n def wrapper(*args, **kwargs):\n if args:\n event = args[0]\n if event.get('warmer'):\n current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n lambda_name = os.getenv('AWS_LAMBDA_FUNCTION_NAME')\n stream_name = os.getenv('AWS_LAMBDA_LOG_STREAM_NAME')\n print(json.dumps({\n \"service\": \"warmer\",\n \"lambda_name\": lambda_name,\n \"stream_name\": stream_name,\n \"invoke_time\": current_time\n }))\n time.sleep(event.get('delay', 0.3))\n return\n return f(*args, **kwargs)\n\n return wrapper",
"def wrap(before=None, after=None, condition=lambda *args, **kwargs: True):\n def decorator(func):\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n yes = condition(*args, **kwargs)\n if yes and before:\n before()\n result = func(*args, **kwargs)\n if yes and after:\n after()\n return result\n return wrapped\n return decorator",
"def test_add_middleware_command() -> None:\n\n def test(message: Message) -> None:\n pass\n\n bot = Phial(\"token\", {})\n bot.add_middleware(test)\n\n assert len(bot.middleware_functions) == 1\n assert bot.middleware_functions[0] is test",
"def before(self, before: Route.Decorator):\n pass",
"def func_custom(fname):\n def decorator(f):\n def decorated(*idp, **kwp):\n global tracer_data\n if hasattr(tracer_data, 'enabled') and tracer_data.enabled:\n try:\n call(fname)\n return f(*idp, **kwp)\n finally:\n ret()\n else:\n return f(*idp, **kwp)\n return decorated\n return decorator",
"def prepend_middleware(self, *args):\n for arg in args:\n if arg and arg not in self.list_middleware:\n self.list_middleware.insert(0, arg)\n return self",
"def inner(func):\r\n\r\n service = func.__qualname__.split(\".\")[0]\r\n _Router().add_route(\r\n service=service,\r\n grpc_method=func.__name__,\r\n url_path=url,\r\n http_method=method\r\n )\r\n if pre_request is not None and len(pre_request) > 0:\r\n _MiddlewareManager().add_route_pre_middleware(pre_request, url)\r\n if pos_request is not None and len(pos_request) > 0:\r\n _MiddlewareManager().add_route_pre_middleware(pos_request, url)\r\n return func",
"def before_test(self, func, *args, **kwargs):\n pass",
"def __call__(self, func):\n\n # set logger if it was not set earlier\n if not self.logger:\n logging.basicConfig()\n self.logger = logging.getLogger(func.__module__)\n\n @functools.wraps(func)\n def wrapper(*args, **kwds):\n self.logger.debug(\n self.ENTRY_MESSAGE.format(func.__name__)) # logging level .info(). Set to .debug() if you want to\n f_result = func(*args, **kwds)\n self.logger.debug(\n self.EXIT_MESSAGE.format(func.__name__)) # logging level .info(). Set to .debug() if you want to\n return f_result\n\n return wrapper",
"def wrap_analyser(func, *args, **kwargs):\n\n def wrapped():\n return func(*args, **kwargs)\n return wrapped",
"def __call__(self, environ, start_response):\n middleware = Middleware(environ, start_response)\n middleware.application = self\n return middleware",
"def apply_middleware(self, page, html):\n for middleware in self.middleware:\n html = middleware(page, html)\n return html",
"def debugargs(prefix='***'):\n def debug(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n print(prefix + ': ' + func.__qualname__)\n return func(*args, **kwargs)\n return wrapper\n return debug",
"def decorator(func):\n return survey_started(require(numbers)(set_navbar(minutes)(func)))",
"def before_request():\n pass",
"def decorated(origFunc, newFunc, decoration='None'):\n\n pass",
"def middleware(self, *args):\n for arg in args:\n if arg and arg not in self.list_middleware:\n self.list_middleware.append(arg)\n return self",
"def advice(*args, **kwargs):\n # TODO: make implementation as stated above\n return function(*args, **kwargs)",
"def test_parameters(self):\n class Test(pyperry.Base): pass\n Test.add_middleware('read', self.Middle)\n Test.add_middleware('read', self.Middle, { 'foo': 'bar' })\n Test.add_middleware('read', self.Middle, foo='bar')",
"def debug(func):\n\n @functools.wraps(func)\n def decorated(*args, **kwargs):\n if args and not kwargs:\n print(\"~ input of {}: args: {}\".format(func.__name__, args))\n elif not args and kwargs:\n print(\"~ input of {}: kwargs: {}\".format(func.__name__, kwargs))\n elif args and kwargs:\n print(\"~ input of {}: args: {}, kwargs: {}\".format(func.__name__, args, kwargs))\n else:\n print(\"~ input of {}: NO_ARGS\".format(func.__name__))\n output = func(*args, **kwargs) # stores the result of the function\n print(\"~ output of {}:\".format(func.__name__), output)\n return output\n\n return decorated",
"def wrapped(func):\n self.routes.append((path, {\n 'regex': re.compile('^' + re.sub(self._part_matcher,'(.*?)',path) + '$'),\n 'function':func,\n 'reqs':req,\n 'kwargs':kwargs,\n 'parts':parts_info,\n 'generate':generate\n }))\n\n return func",
"def wrap_before(before, condition=lambda *args, **kwargs: True):\n def decorator(func):\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n if condition(*args, **kwargs):\n before()\n return func(*args, **kwargs)\n return wrapped\n return decorator",
"def bofore_response_handle(self, func):\n self.before_response.append(func)\n return func",
"def before_request(self, func: typing.Callable):\n return self.add_hook(type_=\"pre\", hook=func)",
"def debug(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n if callable(func):\n print('DEBUG: ' + func.__qualname__)\n return func(*args, **kwargs)\n elif isinstance(func, staticmethod) or isinstance(func, classmethod):\n print('DEBUG: ' + func.__func__.__qualname__)\n print(args)\n print(kwargs)\n return func.__func__(*args, **kwargs)\n\n return wrapper",
"def get_middleware(exclude=(), append=(),\n current={'middleware': MIDDLEWARE_CLASSES}):\n\n current['middleware'] = tuple(\n [m for m in current['middleware'] if m not in exclude]\n ) + tuple(append)\n return current['middleware']",
"async def test_middleware_multiple(self, dm):\n\n async def _first(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"first\"]\n await handler(ctx, responder)\n\n async def _second(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"second\"]\n await handler(ctx, responder)\n\n async def _handler(ctx, responder):\n # '_first' should have been called first, then '_second'\n assert responder.frame[\"middles\"] == [\"first\", \"second\"]\n\n dm.add_middleware(_first)\n dm.add_middleware(_second)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n ctx = create_request(\"domain\", \"middle\")\n result = await dm.apply_handler(ctx, create_responder(ctx))\n assert result.dialogue_state == \"middleware_test\"",
"def test_stepregitry_register_func_with_multiple_decorators():\n # given\n registry = StepRegistry()\n context = {}\n registry.create_step_decorators(context)\n\n # when\n def test_step():\n ...\n\n test_step = context[\"given\"](\"pattern\")(test_step)\n test_step = context[\"when\"](\"pattern\")(test_step)\n\n # then\n assert registry.step_implementations(\"Given\") == [\n StepImpl(\"Given\", \"pattern\", test_step)\n ]\n assert registry.step_implementations(\"When\") == [\n StepImpl(\"When\", \"pattern\", test_step)\n ]",
"def slack_augment(function):\n\n def wrapped(request, *args, **kwargs):\n \n SlackRequest.augment(request)\n try: token = request.slack_token\n except: return _Http403()\n #print (\"token: {0.slack_token}\".format(request))\n #print (\"team id: {0.slack_tid}\".format(request))\n \n kvs = KVS.kvs_get(\"slack::token\")\n teamid = kvs.get(token, False)\n if teamid == \"true\" or teamid == \"True\" or teamid == \"TRUE\": teamid = True\n # check whether the token is in the key value storage\n # if yes, get team id and check whether it is 'true' -> set True (every team matches)\n if teamid == False:\n # teamid == False -> not in storage, so check in settings\n try: teamid = settings.SLACK_ACCESS[token]\n except: teamid = False\n \n # now we've got the teamid, see whether we need to check (ie, not True)\n if teamid != True:\n if not teamid == request.slack_tid: teamid = False\n \n # if we could not validate token (and possibly team), deny\n if teamid == False: return _Http403()\n \n # render the request using the wrapped function\n response = function(request, *args, **kwargs)\n if isinstance(response, SlackResponse): return response.as_json_response()\n return response\n\n if (function.__doc__ != None): wrapped.__doc__=function.__doc__+\"\\n\\n[decorated by @slack_augment]\\n\"\n wrapped.__name__=function.__name__\n return wrapped",
"def construct_fixture_middleware(fixtures):\n def fixture_middleware(make_request, web3):\n def middleware(method, params):\n if method in fixtures:\n result = fixtures[method]\n return {'result': result}\n else:\n return make_request(method, params)\n return middleware\n return fixture_middleware",
"def decorator(fn):\n @functools.wraps(fn)\n def result(*args, **kwargs):\n request_time = datetime.datetime.now()\n actual_response = fn(*args, **kwargs)\n request = bottle.request\n response = bottle.response\n # modify this to log exactly what you need:\n logger.info('%s %s %s %s %s', request.remote_addr,\n request_time,\n request.method,\n request.url,\n response.status)\n logger.info('Cookies: %s', request.get_cookie('login'))\n logger.info('Handeled by: \"%s\" in file: \"%s\"', fn.__name__, SCRIPT_NAME)\n\n return actual_response\n return result",
"def _context(name, func):\n\tpush_aspect(name, func)\n\tyield\n\tpop_aspect(name)",
"def on_load_middleware():\n\n # protect middleware wrapping: only a single thread proceeds\n global load_middleware_lock # lock gets overwritten as None after init\n if not load_middleware_lock: # already initialized? abort\n return\n mwlock = load_middleware_lock\n mwlock.acquire() # acquire global lock\n if not load_middleware_lock: # check again\n mwlock.release() # abort\n return\n load_middleware_lock = None # mark global as \"init done\"\n\n try:\n # middleware hooks\n from django.conf import settings\n for i in settings.MIDDLEWARE_CLASSES:\n if i.startswith('oboe'):\n continue\n dot = i.rfind('.')\n if dot < 0 or dot+1 == len(i):\n continue\n objname = i[dot+1:]\n imports.whenImported(i[:dot],\n functools.partial(middleware_hooks, objname=objname)) # XXX Not Python2.4-friendly\n\n # ORM\n if oboe.config['inst_enabled']['django_orm']:\n from oboeware import inst_django_orm\n imports.whenImported('django.db.backends', inst_django_orm.wrap)\n\n # templates\n if oboe.config['inst_enabled']['django_templates']:\n from oboeware import inst_django_templates\n import django\n if StrictVersion(django.get_version()) >= StrictVersion('1.3'):\n imports.whenImported('django.template.base', inst_django_templates.wrap)\n else:\n imports.whenImported('django.template', inst_django_templates.wrap)\n\n # load pluggaable instrumentation\n from loader import load_inst_modules\n load_inst_modules()\n\n # it's usually a tuple, but sometimes it's a list\n if type(settings.MIDDLEWARE_CLASSES) is tuple:\n settings.MIDDLEWARE_CLASSES = ('oboeware.djangoware.OboeDjangoMiddleware',) + settings.MIDDLEWARE_CLASSES\n elif type(settings.MIDDLEWARE_CLASSES) is list:\n settings.MIDDLEWARE_CLASSES = ['oboeware.djangoware.OboeDjangoMiddleware'] + settings.MIDDLEWARE_CLASSES\n else:\n print >> sys.stderr, \"Oboe error: thought MIDDLEWARE_CLASSES would be either a tuple or a list, got \" + \\\n str(type(settings.MIDDLEWARE_CLASSES))\n\n finally: # release instrumentation lock\n mwlock.release()\n\n try:\n add_rum_template_tags()\n except Exception, e:\n print >> sys.stderr, \"Oboe error: couldn't add RUM template tags: %s\" % (e,)",
"def req_as_decorator(req_output, *args, **kwargs):\r\n return req_output(dummy_func)(*args, **kwargs)",
"def inject_middleware():\n if 'appmap.django.Middleware' not in settings.MIDDLEWARE:\n settings.MIDDLEWARE.insert(0, 'appmap.django.Middleware')",
"def debug(debug=False):\n def decorator(func):\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n if debug:\n print(f\"Calling {func.__name__.upper()}\")\n value = func(*args, **kwargs)\n return value\n else:\n return func(*args, **kwargs)\n\n return wrapper\n\n return decorator",
"def inspect(decorated_function):\n def wrapper(*args, **kwargs):\n \"\"\"Wrapper function that adds functionality to decorated function\"\"\"\n print('Before function')\n value = decorated_function(*args, **kwargs)\n print('After function')\n return value\n return wrapper",
"async def pre_middleware(\n self,\n event: T_contra,\n context_variables: Optional[dict] = None,\n ) -> Optional[List[BaseMiddleware]]:\n mw_instances = []\n\n for middleware in self.middlewares:\n mw_instance = middleware(event, view=self)\n await mw_instance.pre()\n if not mw_instance.can_forward:\n logger.debug(\"{} pre returned error {}\", mw_instance, mw_instance.error)\n return None\n\n mw_instances.append(mw_instance)\n\n if context_variables is not None:\n context_variables.update(mw_instance.context_update)\n\n return mw_instances",
"def _log(fn):\n @wraps(fn)\n def wrapper(self, *args, **kwargs):\n self.log.append(fn.__name__ + ' :: args={} kwargs={}'.format(args, kwargs))\n return fn(self, *args, **kwargs)\n return wrapper",
"def test_setup(funct):\n\n def decorated_setup():\n \"\"\"Decorated test setup.\"\"\"\n testdb.reload_db()\n funct()\n return decorated_setup",
"def __call__(self, func):\n\n # set logger if it was not set earlier\n if not self.logger:\n logging.basicConfig()\n self.logger = logging.getLogger(func.__module__)\n\n @functools.wraps(func)\n def wrapper(*args, **kwds):\n st = datetime.datetime.now()\n f_result = func(*args, **kwds)\n et = datetime.datetime.now()\n self.logger.debug(\"%s duration: %s\" % (func.__name__, et - st))\n return f_result\n\n return wrapper",
"def logging(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n res = func(*args, **kwargs)\n print(func.__name__, args, kwargs)\n return res\n return wrapper",
"def decorator(func):\n\n pass",
"def sandwich(func):\n @functools.wraps(func)\n def wrapped_decorator(*args, **kwargs):\n print(UPPER_SLICE)\n arguments= func(*args, **kwargs)\n print(LOWER_SLICE)\n return arguments\n return wrapped_decorator",
"def setDecorated(self, decorated):",
"def function_logger(orig_func):\n # Logging initialization\n import logging\n logger = logging.getLogger(__name__)\n\n @wraps(orig_func)\n def wrapper(*args, **kwargs):\n logger.info('Ran with args: {}, and kwargs: {}'.format(args, kwargs))\n return orig_func(*args, **kwargs)\n\n return wrapper",
"def wrapperfunc(func):\n def wrap_wrapper(self, *args, **kwargs):\n def inner_wrapper(arg):\n apifunc = get_apifunc(arg)\n func(self, apifunc, *args, **kwargs)\n return apifunc\n return inner_wrapper\n return wrap_wrapper",
"def before_request(self, f):\n self.before_request_funcs.append(f)\n return f",
"def survey_question(numbers, minutes):\n def decorator(func):\n \"\"\"Condenses survey_started, require, and set_navbar\"\"\"\n return survey_started(require(numbers)(set_navbar(minutes)(func)))\n return decorator",
"def get_middlewares(config):\n middlewares = (\n # First, normalize request path, which may result in redirect\n normalize_path_middleware(append_slash=True, merge_slashes=True),\n # After, enable error middleware to catch all errors from any code below\n # error_middleware,\n # Validate API requests\n validation_middleware,\n )\n return middlewares",
"def decorate(func, *args, **kws):\n self.func = func\n def do_authenticate():\n auth_header = self.request.headers.get('Authorization', '')\n if auth_header.split():\n scheme, code = auth_header.split()\n else:\n scheme = 'Basic'\n code = ''\n if scheme != 'Basic':\n raise ValueError('The authentication scheme is not BASIC')\n if b64decode(code):\n user, password = b64decode(code).split(':')\n else:\n user = password = ''\n if self.user == user and self.password == password:\n # the request already had valid authentication header.\n return self.func(*args, **kws)\n resp = self.response\n resp.set_status(401)\n self.render('Auth')\n resp.headers['WWW-Authenticate'] = 'Basic realm=\"%s\"' % self.realm\n\n return do_authenticate",
"def wrapper(function):\n\n @functools.wraps(function)\n def wrapped(*args, **kwargs):\n \"\"\"\n Wrapped function.\n \"\"\"\n\n with context:\n return function(*args, **kwargs)\n\n return wrapped",
"def test_ignore_lack_of_metadata():\n\n def original(something, dispatcher, intent):\n \"\"\"Original!\"\"\"\n pass\n\n new_func = partial(original, \"something\")\n original.attr = 1\n wrapped = do(new_func)\n assert wrapped.__name__ == \"do_wrapper\"",
"def info_decorator(func):\n\n def wrapper(*args, **kwargs):\n\n return func(*args, **kwargs)\n\n return wrapper",
"def webapp_add_wsgi_middleware(app):\n from google.appengine.ext.appstats import recording\n return recording.appstats_wsgi_middleware(app)",
"def test_middleware_loads(self):\n self.client.get(\"/__debug__\")"
] | [
"0.65105826",
"0.62661564",
"0.61409885",
"0.60957867",
"0.59238327",
"0.59235173",
"0.5898055",
"0.58747447",
"0.5837454",
"0.5817847",
"0.5798494",
"0.57882106",
"0.5778773",
"0.5720582",
"0.57170993",
"0.5715777",
"0.5690158",
"0.5687903",
"0.568684",
"0.5677401",
"0.565879",
"0.56583214",
"0.5621996",
"0.561789",
"0.56129867",
"0.55898553",
"0.5571235",
"0.55538285",
"0.55354685",
"0.54981786",
"0.546906",
"0.54679054",
"0.5464448",
"0.5454346",
"0.54390794",
"0.54369915",
"0.54317874",
"0.5427017",
"0.54266953",
"0.54181224",
"0.5410271",
"0.5408827",
"0.54020625",
"0.5399393",
"0.53968436",
"0.53763354",
"0.5350735",
"0.5346321",
"0.53425074",
"0.5333352",
"0.52989995",
"0.5272011",
"0.52685326",
"0.52666783",
"0.5256497",
"0.5234284",
"0.52332634",
"0.52308923",
"0.5229187",
"0.52241063",
"0.521214",
"0.51946056",
"0.5190011",
"0.5177121",
"0.5165166",
"0.5152435",
"0.5140699",
"0.5134188",
"0.51306534",
"0.5120496",
"0.51069564",
"0.50976634",
"0.50856316",
"0.50825596",
"0.50812006",
"0.5074346",
"0.5073804",
"0.50673646",
"0.506695",
"0.5064853",
"0.5052057",
"0.5049088",
"0.50485104",
"0.50483555",
"0.50314075",
"0.5025491",
"0.5019342",
"0.50128436",
"0.5008486",
"0.5006087",
"0.5003595",
"0.50032616",
"0.50027853",
"0.49784127",
"0.49776942",
"0.49635237",
"0.4952802",
"0.49494582",
"0.4943046",
"0.4942057"
] | 0.59098107 | 6 |
This function layers the given `middleware` left to right around `handler` and calls the fully wrapped handler with `ctx` as its argument. Setting `verbose` to `True` prints as each middleware's before and after sections run (see the sketch after this record). | def wrap_and_call(ctx, handler, *middleware, verbose=False):
global _VERBOSE_MODE
_VERBOSE_MODE = verbose
middleware_ = list(middleware)
return compose(*reversed(middleware_))(handler)(ctx) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))",
"def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))",
"def middleware(name, *args, **kwargs):\n\n def new_annotate(g_fn):\n def new_middleware(handler):\n def new_handler(ctx):\n _print_inwards(name)\n\n g = g_fn(ctx, *args, **kwargs)\n\n changed_ctx = next(g)\n new_ctx = handler(changed_ctx)\n last_ctx = g.send(new_ctx)\n\n _print_outwards(name)\n\n return last_ctx\n\n return new_handler\n\n return new_middleware\n\n return new_annotate",
"def wrap_in_middleware(app, global_conf, application_stack, **local_conf):\n stack = application_stack\n # Merge the global and local configurations\n conf = global_conf.copy()\n conf.update(local_conf)\n debug = asbool(conf.get('debug', False))\n # First put into place httpexceptions, which must be most closely\n # wrapped around the application (it can interact poorly with\n # other middleware):\n app = wrap_if_allowed(app, stack, httpexceptions.make_middleware, name='paste.httpexceptions', args=(conf,))\n # The recursive middleware allows for including requests in other\n # requests or forwarding of requests, all on the server side.\n if asbool(conf.get('use_recursive', True)):\n from paste import recursive\n app = wrap_if_allowed(app, stack, recursive.RecursiveMiddleware, args=(conf,))\n # Various debug middleware that can only be turned on if the debug\n # flag is set, either because they are insecure or greatly hurt\n # performance\n if debug:\n # Middleware to check for WSGI compliance\n if asbool(conf.get('use_lint', True)):\n from paste import lint\n app = wrap_if_allowed(app, stack, lint.make_middleware, name='paste.lint', args=(conf,))\n # Middleware to run the python profiler on each request\n if asbool(conf.get('use_profile', False)):\n import profile\n app = wrap_if_allowed(app, stack, profile.ProfileMiddleware, args=(conf,))\n # Middleware that intercepts print statements and shows them on the\n # returned page\n if asbool(conf.get('use_printdebug', True)):\n from paste.debug import prints\n app = wrap_if_allowed(app, stack, prints.PrintDebugMiddleware, args=(conf,))\n if debug and asbool(conf.get('use_interactive', False)):\n # Interactive exception debugging, scary dangerous if publicly\n # accessible, if not enabled we'll use the regular error printing\n # middleware.\n try:\n from weberror import evalexception\n app = wrap_if_allowed_or_fail(app, stack, evalexception.EvalException,\n args=(conf,),\n kwargs=dict(templating_formatters=build_template_error_formatters()))\n except MiddlewareWrapUnsupported as exc:\n log.warning(str(exc))\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n else:\n # Not in interactive debug mode, just use the regular error middleware\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n # Transaction logging (apache access.log style)\n if asbool(conf.get('use_translogger', True)):\n from paste.translogger import TransLogger\n app = wrap_if_allowed(app, stack, TransLogger)\n # X-Forwarded-Host handling\n from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware\n app = wrap_if_allowed(app, stack, XForwardedHostMiddleware)\n return app",
"def load_middleware(*args, **kwargs):\n inject_middleware()\n BaseHandler.load_middleware = original_load_middleware\n return original_load_middleware(*args, **kwargs)",
"async def test_middleware_multiple(self, dm):\n\n async def _first(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"first\"]\n await handler(ctx, responder)\n\n async def _second(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"second\"]\n await handler(ctx, responder)\n\n async def _handler(ctx, responder):\n # '_first' should have been called first, then '_second'\n assert responder.frame[\"middles\"] == [\"first\", \"second\"]\n\n dm.add_middleware(_first)\n dm.add_middleware(_second)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n ctx = create_request(\"domain\", \"middle\")\n result = await dm.apply_handler(ctx, create_responder(ctx))\n assert result.dialogue_state == \"middleware_test\"",
"def _inject_trace_middleware_to_args(trace_middleware, args, kwargs):\n # type: (Callable, Tuple, Dict) -> Tuple[Tuple, Dict]\n middlewares_arg = 8\n if _graphql_version >= (3, 2):\n # middleware is the 10th argument graphql.execute(..) version 3.2+\n middlewares_arg = 9\n\n # get middlewares from args or kwargs\n try:\n middlewares = get_argument_value(args, kwargs, middlewares_arg, \"middleware\") or []\n if isinstance(middlewares, MiddlewareManager):\n # First we must get the middlewares iterable from the MiddlewareManager then append\n # trace_middleware. For the trace_middleware to be called a new MiddlewareManager will\n # need to initialized. This is handled in graphql.execute():\n # https://github.com/graphql-python/graphql-core/blob/v3.2.1/src/graphql/execution/execute.py#L254\n middlewares = middlewares.middlewares # type: Iterable\n except ArgumentError:\n middlewares = []\n\n # Note - graphql middlewares are called in reverse order\n # add trace_middleware to the end of the list to wrap the execution of resolver and all middlewares\n middlewares = list(middlewares) + [trace_middleware]\n\n # update args and kwargs to contain trace_middleware\n args, kwargs = set_argument_value(args, kwargs, middlewares_arg, \"middleware\", middlewares)\n return args, kwargs",
"async def test_middleware_single(self, dm):\n\n async def _middle(request, responder, handler):\n responder.middle = True\n await handler(request, responder)\n\n async def _handler(request, responder):\n assert responder.middle\n responder.handler = True\n\n dm.add_middleware(_middle)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n request = create_request(\"domain\", \"middle\")\n response = create_responder(request)\n result = await dm.apply_handler(request, response)\n assert result.dialogue_state == \"middleware_test\"\n assert result.handler",
"def middleware_after(self):\n pass",
"async def raven_middleware(app, handler):\n async def middleware_handler(request):\n try:\n return await handler(request)\n except aiohttp.web.HTTPClientError:\n # Do not capture client errors\n raise\n except Exception:\n raven_client.captureException()\n raise\n return middleware_handler",
"def instrument_flask():\n oc_trace_config = app.config.get('OPENCENSUS_TRACE', {})\n oc_trace_config.update({\n 'EXPORTER': trace_exporter.TraceExporter,\n 'PROPAGATOR': trace_context_http_header_format.TraceContextPropagator\n })\n app.config.update(OPENCENSUS_TRACE=oc_trace_config)\n return flask_middleware.FlaskMiddleware(app)",
"def main(ctx, debug):\n if debug:\n logger.setLevel(logging.DEBUG)",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def on_load_middleware():\n\n # protect middleware wrapping: only a single thread proceeds\n global load_middleware_lock # lock gets overwritten as None after init\n if not load_middleware_lock: # already initialized? abort\n return\n mwlock = load_middleware_lock\n mwlock.acquire() # acquire global lock\n if not load_middleware_lock: # check again\n mwlock.release() # abort\n return\n load_middleware_lock = None # mark global as \"init done\"\n\n try:\n # middleware hooks\n from django.conf import settings\n for i in settings.MIDDLEWARE_CLASSES:\n if i.startswith('oboe'):\n continue\n dot = i.rfind('.')\n if dot < 0 or dot+1 == len(i):\n continue\n objname = i[dot+1:]\n imports.whenImported(i[:dot],\n functools.partial(middleware_hooks, objname=objname)) # XXX Not Python2.4-friendly\n\n # ORM\n if oboe.config['inst_enabled']['django_orm']:\n from oboeware import inst_django_orm\n imports.whenImported('django.db.backends', inst_django_orm.wrap)\n\n # templates\n if oboe.config['inst_enabled']['django_templates']:\n from oboeware import inst_django_templates\n import django\n if StrictVersion(django.get_version()) >= StrictVersion('1.3'):\n imports.whenImported('django.template.base', inst_django_templates.wrap)\n else:\n imports.whenImported('django.template', inst_django_templates.wrap)\n\n # load pluggaable instrumentation\n from loader import load_inst_modules\n load_inst_modules()\n\n # it's usually a tuple, but sometimes it's a list\n if type(settings.MIDDLEWARE_CLASSES) is tuple:\n settings.MIDDLEWARE_CLASSES = ('oboeware.djangoware.OboeDjangoMiddleware',) + settings.MIDDLEWARE_CLASSES\n elif type(settings.MIDDLEWARE_CLASSES) is list:\n settings.MIDDLEWARE_CLASSES = ['oboeware.djangoware.OboeDjangoMiddleware'] + settings.MIDDLEWARE_CLASSES\n else:\n print >> sys.stderr, \"Oboe error: thought MIDDLEWARE_CLASSES would be either a tuple or a list, got \" + \\\n str(type(settings.MIDDLEWARE_CLASSES))\n\n finally: # release instrumentation lock\n mwlock.release()\n\n try:\n add_rum_template_tags()\n except Exception, e:\n print >> sys.stderr, \"Oboe error: couldn't add RUM template tags: %s\" % (e,)",
"def test_middleware_loads(self):\n self.client.get(\"/__debug__\")",
"def log_meta_context(**kwargs):\n if not hasattr(_meta_local, 'meta'):\n _meta_local.meta = []\n\n if len(_meta_local.meta):\n # Seems to be a nested context. Include meta from the parent\n # context\n d = _meta_local.meta[-1].to_dict()\n d.update(kwargs)\n kwargs = d\n\n _meta_local.meta.append(LogMeta(**kwargs))\n\n yield _meta_local.meta[-1]\n # Remove the current meta from the stack after the context exits\n _meta_local.meta.pop()",
"def test_continue_on_early_trace_ending(context):\n env = get_env(\n {\n \"AWS_LAMBDA_FUNCTION_NAME\": \"finishing_spans_early_handler\",\n \"DD_LAMBDA_HANDLER\": \"tests.contrib.aws_lambda.handlers.finishing_spans_early_handler\",\n }\n )\n\n with override_env(env):\n patch()\n\n datadog(finishing_spans_early_handler)({}, context())",
"def main(ctx, verbose):\n ctx.ensure_object(dict)\n if verbose:\n log_level = 'DEBUG'\n else:\n log_level = 'INFO'\n\n init_logger('reVX', log_level=log_level)",
"def verbose(ctx, msg, *args):\n if ctx.verbose:\n info(msg, *args)",
"def apply_middleware(self, page, html):\n for middleware in self.middleware:\n html = middleware(page, html)\n return html",
"def ContextLog(logger, oline, cline):\n logger('{}...'.format(oline))\n yield\n logger('{}.'.format(cline))",
"async def pre_middleware(\n self,\n event: T_contra,\n context_variables: Optional[dict] = None,\n ) -> Optional[List[BaseMiddleware]]:\n mw_instances = []\n\n for middleware in self.middlewares:\n mw_instance = middleware(event, view=self)\n await mw_instance.pre()\n if not mw_instance.can_forward:\n logger.debug(\"{} pre returned error {}\", mw_instance, mw_instance.error)\n return None\n\n mw_instances.append(mw_instance)\n\n if context_variables is not None:\n context_variables.update(mw_instance.context_update)\n\n return mw_instances",
"async def test_async_middleware(dm):\n\n async def _middle(request, responder, handler):\n responder.middle = True\n await handler(request, responder)\n\n async def _handler(request, responder):\n assert responder.middle\n responder.handler = True\n\n dm.add_middleware(_middle)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n request = create_request(\"domain\", \"middle\")\n response = create_responder(request)\n result = await dm.apply_handler(request, response)\n dm.apply_handler(request, response)\n assert result.dialogue_state == \"middleware_test\"\n assert result.handler",
"def trace(filler):\n @decorator\n def dec(func):\n def wrapper(*args, **kwargs):\n indent = filler * wrapper.level\n arguments = ', '.join(str(x) for x in args)\n print('{} --> {}({})'.format(indent, func.__name__, arguments))\n wrapper.level += 1\n\n result = func(*args, **kwargs)\n print('{} <-- {}({}) == {}'.format(indent, func.__name__, arguments, result))\n wrapper.level -= 1\n return result\n wrapper.level = 0\n return wrapper\n return dec",
"def QueryParametersMidware(event, context):\n\n def decorator_function(func):\n \"\"\"Decorator: performs query parameter checking and passes response\n\n Arguments:\n func (function): inner function that takes the response\n\n Returns:\n (function): wrapped function\n \"\"\"\n\n def wrapper(resp):\n \"\"\"Inner function: performs media type checking and passes response\n\n Arguments:\n resp (Response): Response object\n \n Returns:\n (Response): Response object, modified by query parameter midware\n \"\"\"\n\n # perform query param middleware function, which modifies the \n # response status code/headers/body as necessary\n # if status code is still OK at end of function, then execute the\n # inner function\n QueryParametersMW.middleware_func(event, resp)\n if resp.get_status_code() == SC.OK:\n return func(resp)\n else:\n return resp\n \n return wrapper\n\n return decorator_function",
"def _app(ctx, logfile, verbose):\n log_levels = {\n 0: logging.WARNING,\n 1: logging.INFO,\n 2: logging.DEBUG,\n }\n loglevel = log_levels.get(verbose, logging.DEBUG)\n # TODO more flexible logging config\n logging.basicConfig(format='%(name)s: %(levelname)s: %(message)s',\n level=loglevel, filename=logfile)\n\n tasks = ctx.obj['tasks']\n tasks.context = ctx",
"def _log_nested_outer(self):\n def _log_nested_inner():\n logging.info('info nested')\n return _log_nested_inner",
"def session_thread(**kwargs):\n mw = Middleware()\n try:\n mw.on_request_start()\n except Exception:\n mw.on_request_error('')\n finally:\n mw.on_response()",
"def context(subcontext=None) -> None:\n if subcontext is None:\n subcontext = []\n args = subcontext\n\n if len(args) == 0:\n args = config_context_sections.split()\n\n sections = [(\"legend\", lambda *args, **kwargs: [M.legend()])] if args else []\n sections += [(arg, context_sections.get(arg[0], None)) for arg in args]\n\n result = defaultdict(list)\n result_settings: DefaultDict[str, dict] = defaultdict(dict)\n for section, func in sections:\n if func:\n target = output(section)\n # Last section of an output decides about output settings\n settings = output_settings.get(section, {})\n result_settings[target].update(settings)\n with target as out:\n result[target].extend(\n func(\n target=out,\n width=settings.get(\"width\", None),\n with_banner=settings.get(\"banner_top\", True),\n )\n )\n\n for target, res in result.items():\n settings = result_settings[target]\n if len(res) > 0 and settings.get(\"banner_bottom\", True):\n with target as out:\n res.append(pwndbg.ui.banner(\"\", target=out, width=settings.get(\"width\", None)))\n\n for target, lines in result.items():\n with target as out:\n if result_settings[target].get(\"clearing\", config_clear_screen) and lines:\n clear_screen(out)\n out.write(\"\\n\".join(lines))\n if out is sys.stdout:\n out.write(\"\\n\")\n out.flush()",
"def handle_request(request, *args, **kwargs):\n root = request.route.grit_params[\"root\"]\n logger.info(\"WSGIApplication::handle_request path: %s method: %s\", request.path_qs, request.method)\n\n reqctx = RequestCtx(request, request.response, kwargs)\n def run_pipeline(l):\n if l:\n handler_cls = l.pop(0)\n logger.debug(\"running pipeline entry %s\", handler_cls)\n with handler_cls.begin(reqctx):\n if 0 <= reqctx.response.status_int <= 299:\n run_pipeline(l)\n\n run_pipeline(list(root.pipeline))\n\n rv = request.response\n if isinstance(rv, basestring):\n rv = webapp2.Response(rv)\n elif isinstance(rv, tuple):\n rv = webapp2.Response(*rv)\n request.response = rv\n logger.debug(\"Pipeline completed with response status %s\", rv.status)",
"def caplog(_caplog):\n class PropogateHandler(logging.Handler):\n def emit(self, record):\n logging.getLogger(record.name).handle(record)\n\n handler_id = logger.add(PropogateHandler(), format=\"{message} {extra}\")\n yield _caplog\n logger.remove(handler_id)",
"def generate_viewfunc(final_viewfunc, middlewares):\n accepted_kwargs = []\n for param in inspect.signature(final_viewfunc).parameters.values():\n if param.kind == param.POSITIONAL_ONLY:\n raise ValueError(\"%s expects positional argument %s\"\n % (final_viewfunc, param.name))\n elif param.kind == param.VAR_POSITIONAL:\n raise ValueError(\"%s expects var-positional argument %s\"\n % (final_viewfunc, param.name))\n elif param.kind == param.VAR_KEYWORD:\n raise ValueError(\"%s expects var-keyword argument %s\"\n % (final_viewfunc, param.name))\n\n accepted_kwargs.append(param.name)\n\n wants_ldap = 'ldap' in accepted_kwargs\n\n def caller():\n kwargs = {\n \"log\": APP.logger,\n }\n\n for middleware in middlewares:\n output = middleware.request_infos()\n if output:\n APP.logger.debug(\"Middleware %s generated kwargs: %s\",\n middleware,\n output)\n kwargs.update(output)\n\n result = middleware.intermediate_viewfunc()\n if result is not None:\n APP.logger.debug(\"Middleware %s returned: %s\",\n middleware,\n result)\n return result\n\n # Build the LDAP client\n if wants_ldap:\n ldap_client = LdapClient(\n APP.logger,\n kwargs.get('user_tokeninfo'),\n kwargs.get('client_info'),\n )\n kwargs['ldap'] = ldap_client\n\n APP.logger.debug(\"Got args %s for viewfunc %s pre-filter\",\n kwargs,\n final_viewfunc)\n\n kwargs = {key: kwargs[key] for key in kwargs if key in accepted_kwargs}\n\n APP.logger.debug(\"Calling final viewfunc %s with args %s\",\n final_viewfunc,\n kwargs)\n res = final_viewfunc(**kwargs)\n resp = {}\n if isinstance(res, tuple):\n res, resp = res\n resp['result'] = res\n\n headers = {\n \"Content-Security-Policy\": \"default-src: 'none'\",\n \"Feature-Policy\": \"\",\n \"X-Frame-Options\": \"DENY\",\n \"X-Xss-Protection\": \"1; mode=block\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"Referrer-Policy\": \"no-referrer\",\n }\n for middleware in middlewares:\n new_resp = middleware.manipulate_response(resp, kwargs)\n extra_headers = None\n if new_resp and isinstance(new_resp, tuple):\n new_resp, extra_headers = new_resp\n if new_resp is not None:\n APP.logger.debug(\"Middleware %s manipulated response\",\n middleware)\n resp = new_resp\n if extra_headers is not None:\n APP.logger.debug(\"Middleware %s added headers: %s\",\n middleware, extra_headers)\n headers.update(extra_headers)\n\n return resp, headers\n return caller",
"def lambda_handler(event, context):\n\n mytime, lambda_name, env_vars = lambda_init.init_lambda(context)\n stage = env_vars[\"stage\"]\n consumer_master_past_lambda = env_vars[\"consumer_master_past_name\"]\n\n apps, test_params = init_apps_from_test_params(event)\n filters = init_filters()\n\n step = generate_step_from_mytime(mytime)\n\n print(\"step:\", step)\n for app in apps:\n advance_app_timestamp(app, step)\n\n consumer_event = {}\n\n # Invoke the consumer-master lambda for each app in apps\n for app in apps:\n headers = Headers(\n shadowreader_type=\"past\", stage=stage, app=app, step=step\n ).headers\n\n consumer_event = {\n \"app\": app.name,\n \"identifier\": app.identifier,\n \"base_url\": app.base_url,\n \"cur_timestamp\": app.cur_timestamp,\n \"rate\": app.rate,\n \"baseline\": app.baseline,\n \"parent_lambda\": lambda_name,\n \"child_lambda\": consumer_master_past_lambda,\n \"headers\": headers,\n \"filters\": filters,\n }\n invoke_func(consumer_event, func=consumer_master_past_lambda)\n\n if apps and consumer_event:\n print_to_logs(consumer_event, apps)\n\n # Collect metrics and put metrics into CW\n metrics = []\n for app in apps:\n # This is the timestamp (in epoch time) that is being replayed\n # by the load test.\n metric = {\n \"name\": \"replayed_timestamp\",\n \"stage\": stage,\n \"lambda_name\": lambda_name,\n \"app\": app.name,\n \"identifier\": app.identifier,\n \"mytime\": mytime,\n \"val\": app.cur_timestamp,\n }\n metrics.append(metric)\n\n if sr_plugins.exists(\"metrics\"):\n metric_emitter = sr_plugins.load(\"metrics\")\n for metric in metrics:\n metric_emitter.main(metric)\n\n cur_params = {\"apps\": apps, \"filters\": filters, \"test_params\": test_params}\n\n if sr_plugins.exists(\"test_params_emitter\"):\n params_emitter = sr_plugins.load(\"test_params_emitter\")\n params_emitter.main(\n cur_params,\n lambda_name,\n mytime,\n stage,\n env_vars,\n sr_config,\n sr_plugins._sr_plugins,\n )\n\n return json.dumps(cur_params, default=str), json.dumps(consumer_event, default=str)",
"def error_handler_middleware(app):\n def wsgi_app(environ, start_response):\n try:\n return app(environ, start_response)\n except Exception, e:\n logging.exception(e)\n # ... display a custom error message ...\n response = webapp.Response()\n response.set_status(500)\n response.out.write('Ooops! An error occurred...')\n response.wsgi_write(start_response)\n return ['']\n\n return wsgi_app",
"def main(ctx, verbose):\n # Initialize context as empty dict to store data sent from core to commands.\n ctx.obj = {}\n # Enable verbose debug output if required.\n if verbose:\n logging.basicConfig(level=logging.DEBUG)",
"async def intermediate(self, ctx):\n await ctx.send(f'Testing intermediate')",
"def add(self, middleware):\n pass # pragma: no cover",
"def add_microstep_hook(h):\n add_hook(microstep, h)",
"def _context(name, func):\n\tpush_aspect(name, func)\n\tyield\n\tpop_aspect(name)",
"def main(ctx, **kwargs):\n ctx.meta['decode-errors'] = kwargs['errors'] or 'strict'\n ctx.meta['output-logmsg'] = kwargs['logmsg'] or 'normal'",
"def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return",
"def main(ctx, verbose):\n return",
"def setup():\n global log_handler\n\n if vaex.settings.main.logging.setup:\n logger.setLevel(logging.DEBUG)\n\n # create console handler and accept all loglevels\n if vaex.settings.main.logging.rich:\n from rich.logging import RichHandler\n log_handler = RichHandler()\n else:\n log_handler = logging.StreamHandler()\n\n # create formatter\n formatter = logging.Formatter('%(levelname)s:%(threadName)s:%(name)s:%(message)s')\n\n\n # add formatter to console handler\n log_handler.setFormatter(formatter)\n log_handler.setLevel(logging.DEBUG)\n\n # add console handler to logger\n logger.addHandler(log_handler)\n\n logging.getLogger(\"vaex\").setLevel(logging.ERROR) # default to higest level\n _set_log_level(vaex.settings.main.logging.error, logging.ERROR)\n _set_log_level(vaex.settings.main.logging.warning, logging.WARNING)\n _set_log_level(vaex.settings.main.logging.info, logging.INFO)\n _set_log_level(vaex.settings.main.logging.debug, logging.DEBUG)\n # VAEX_DEBUG behaves similar to VAEX_LOGGING_DEBUG, but has more effect\n DEBUG_MODE = os.environ.get('VAEX_DEBUG', '')\n if DEBUG_MODE:\n _set_log_level(DEBUG_MODE, logging.DEBUG)",
"def cli(ctx):\n\n if ctx.invoked_subcommand is None:\n app.log_to_papertrail('app')\n runserver()\n else:\n app.log_to_papertrail('ctx.invoked_subcommand')",
"def test_main():\n info(\"hi\")\n debug(\"shouldn't appear\")\n set_level(DEBUG)\n debug(\"should appear\")\n folder = \"/tmp/testlogging\"\n if os.path.exists(folder):\n shutil.rmtree(folder)\n configure(folder=folder)\n logkv(\"a\", 3)\n logkv(\"b\", 2.5)\n dumpkvs()\n logkv(\"b\", -2.5)\n logkv(\"a\", 5.5)\n dumpkvs()\n info(\"^^^ should see a = 5.5\")\n logkv_mean(\"b\", -22.5)\n logkv_mean(\"b\", -44.4)\n logkv(\"a\", 5.5)\n dumpkvs()\n with ScopedConfigure(None, None):\n info(\"^^^ should see b = 33.3\")\n\n with ScopedConfigure(\"/tmp/test-logger/\", [\"json\"]):\n logkv(\"b\", -2.5)\n dumpkvs()\n\n reset()\n logkv(\"a\", \"longasslongasslongasslongasslongasslongassvalue\")\n dumpkvs()\n warn(\"hey\")\n error(\"oh\")\n logkvs({\"test\": 1})",
"def log_func_edges(func):\n\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n \"\"\"Generic wrapper function.\"\"\"\n logging.info(f\"Entering `{func.__name__}` for processing...\")\n results = func(*args, **kwargs)\n logging.info(f\"Exiting processing for `{func.__name__}`\")\n return results\n\n return wrapper",
"def trace(func):\n @wraps(func)\n def tracer(*args, **kwargs):\n name = func.__name__\n stack_size = int(len(inspect.stack(0)) / 2) # @wraps(func) is also increasing the size\n indent = stack_size*'\\t'\n print(f'{indent} > Entering \"{name}\": args: {args}')\n result = func(*args, **kwargs)\n print(f'{indent} < Leaving \"{name}\"')\n return result\n\n return tracer",
"async def _noop_error_handler(ctx: \"RequestContext\") -> None:",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def handler_block(obj, handler_id): # reliably restored by inspect\n pass",
"def RequestHandler_start(self):\n # check for X-Trace header in HTTP request\n ctx, ev = oboe.Context.start_trace('tornado', xtr=self.request.headers.get(\"X-Trace\"), avw=self.request.headers.get(\"X-TV-Meta\"))\n\n if hasattr(self, '__class__') and hasattr(self.__class__, '__name__'):\n ev.add_info(\"Controller\", self.__class__.__name__)\n ev.add_info(\"Action\", self.request.method.lower())\n ev.add_info(\"URL\", self.request.uri)\n ev.add_info(\"Method\", self.request.method)\n ev.add_info(\"HTTP-Host\", self.request.host)\n ctx.report(ev)\n\n # create & store finish event for reporting later\n self.request._oboe_ctx = ctx\n self.request._oboe_finish_ev = ctx.create_event('exit', 'tornado') # adds edge from exit event -> enter event's md\n\n # report the exit event ID in the response header\n self.set_header(\"X-Trace\", self.request._oboe_finish_ev.id())",
"def register_middleware(self, middleware, opts={}):\n self._middleware.append((middleware, opts))",
"def dispatch(self, context, consumed, handler, is_endpoint):\n\t\t\n\t\trequest = context.request\n\t\t\n\t\tif __debug__:\n\t\t\tlog.debug(\"Handling dispatch event.\", extra=dict(\n\t\t\t\t\trequest = id(context),\n\t\t\t\t\tconsumed = consumed,\n\t\t\t\t\thandler = safe_name(handler),\n\t\t\t\t\tendpoint = is_endpoint\n\t\t\t\t))\n\t\t\n\t\t# The leading path element (leading slash) requires special treatment.\n\t\tif not consumed and context.request.path_info_peek() == '':\n\t\t\tconsumed = ['']\n\t\t\n\t\tnConsumed = 0\n\t\tif consumed:\n\t\t\t# Migrate path elements consumed from the `PATH_INFO` to `SCRIPT_NAME` WSGI environment variables.\n\t\t\tif not isinstance(consumed, (list, tuple)):\n\t\t\t\tconsumed = consumed.split('/')\n\t\t\t\n\t\t\tfor element in consumed:\n\t\t\t\tif element == context.request.path_info_peek():\n\t\t\t\t\tcontext.request.path_info_pop()\n\t\t\t\t\tnConsumed += 1\n\t\t\t\telse:\n\t\t\t\t\tbreak\n\t\t\n\t\t# Update the breadcrumb list.\n\t\tcontext.path.append(Crumb(handler, Path(request.script_name)))\n\t\t\n\t\tif consumed: # Lastly, update the remaining path element list.\n\t\t\trequest.remainder = request.remainder[nConsumed:]",
"def trace_xray_subsegment(skip_args=False):\n\n @wrapt.decorator\n def wrapper(wrapped, instance, args, kwargs):\n metadata_extractor = (\n noop_function_metadata if skip_args else extract_function_metadata\n )\n return generic_xray_wrapper(\n wrapped,\n instance,\n args,\n kwargs,\n name=get_function_name,\n namespace=\"local\",\n metadata_extractor=metadata_extractor,\n )\n\n return wrapper",
"def contextLog(logger, level=logging.INFO):\n def decorator(func):\n @wraps(func)\n def wrapper(*args, **kw):\n logger.log(level,\n \"Enter {func}\".format(func=func.__name__))\n\n out = func(*args, **kw)\n\n logger.log(level, \"Exit {func}\".format(func=func.__name__))\n return out\n return wrapper\n return decorator",
"def middleware(self, *args, **kwargs):\n return super(Blueprint, self).middleware(*args, **kwargs)",
"def test_multiple_headers_sent(self):\n\n def app(environ, start_response):\n start_response(\"200 Ok\", [(\"Content-Type\", \"text/plain\")])\n start_response(\"201 Changed My Mind\", [(\"Content-Type\", \"text/plain\")])\n yield \"Hello world\"\n\n event = {\n \"httpMethod\": \"POST\",\n \"path\": \"/\",\n \"queryStringParameters\": None,\n \"headers\": {\n \"Host\": \"localhost\",\n },\n \"body\": None\n }\n context = DummyContext()\n\n with self.assertRaisesRegexp(Exception, \"Second call to start_response must include exc_info\"):\n Handler(app)(event, context)",
"def logWithContext(**context):\n destination = get_destination(context)\n def _log(message=\"\", **kwargs):\n myContext = {}\n myContext.update(context)\n myContext.update(kwargs)\n log(message, **myContext)\n return _log",
"def _resolver_middleware(next_middleware, root, info, **args):\n pin = Pin.get_from(graphql)\n if not pin or not pin.enabled():\n return next_middleware(root, info, **args)\n\n with pin.tracer.trace(\n name=\"graphql.resolve\",\n resource=info.field_name,\n span_type=SpanTypes.GRAPHQL,\n ) as span:\n span.set_tag_str(COMPONENT, config.graphql.integration_name)\n\n return next_middleware(root, info, **args)",
"def main():\n logger = setup_logger()\n\n logger.debug('a debug message')\n logger.info('an info message')\n logger.warning('a warning message')\n logger.error('an error message')\n logger.critical('a critical message')",
"def trace(self, pattern, handler):\n return self.route(TRACE, pattern, handler)",
"def main(context, verbose):\n if verbose:\n click.echo(\"Starting operation...\")\n context.obj = {\n 'verbose': verbose,\n }",
"def _main(args):\n handler = args.handler\n del args.handler\n\n delim = args.delimiter\n del args.delimiter\n\n def output(value):\n if value is not None:\n sys.stdout.buffer.write(value)\n sys.stdout.buffer.write(bytearray([delim]))\n sys.stdout.flush()\n\n args.output = output\n args.elf = Elf(args.elf)\n\n handler(**vars(args))",
"def make_middleware_stack(middleware, base):\n for ware in reversed(middleware):\n base = ware(base)\n return base",
"def middleware(self, *args):\n for arg in args:\n if arg and arg not in self.list_middleware:\n self.list_middleware.append(arg)\n return self",
"def parse_handler_template(request, handler, args, kwargs):\n args = request.path[1:].split('/')\n\n dict_path = {}\n for index, value in enumerate(args):\n if (len(value) == 0):\n continue\n dict_path[index] = value\n\n module = dict_path.get(0, spy_setting.DEFAULT_MODULE)\n controller = dict_path.get(1, spy_setting.DEFAULT_CONTROLLER)\n action = dict_path.get(2, spy_setting.DEFAULT_ACTION)\n action = (action[0]).upper() + action[1:]\n\n kwargs = {'module': module, 'controller': controller, 'action': action}\n # logging.info(kwargs)\n # logging.info(args)\n counter = 3\n while True:\n key = dict_path.get(counter, None)\n val = dict_path.get(counter+1, None)\n if key is not None:\n request.GET.add(key, val)\n counter += 2\n else:\n break\n\n def sub(match):\n return kwargs.get(match.group().strip('{}'))\n\n return re.sub('{.*?}', sub, handler), args, kwargs",
"def add_cont_hook(h):\n add_hook(cont, h)",
"def test_invoke_processor_wrapper_positional(caplog):\n\n testapp = holocron.Application()\n marker = None\n\n def processor(app, items):\n yield from items\n\n def processor_wrapper(app, items, processor, secret):\n nonlocal marker\n marker = secret\n yield from app.invoke([processor])\n\n testapp.add_processor(\"processor\", processor)\n testapp.add_processor_wrapper(\"wrapper\", processor_wrapper)\n\n for _ in testapp.invoke([{\"name\": \"processor\", \"wrapper\": [42]}]):\n pass\n\n assert marker == 42\n assert len(caplog.records) == 0",
"def write_debug_info(self):\n #path = self.request.uri.split('?')[0]\n #method = path.split('/')[-1]\n \n self.write(\"Handler: \" + str(self.__class__.__name__)+\"<br>\")\n self.write(\"<hr>\")\n self.write(str(dir(self.request)))\n self.write(\"<br><hr>\")\n self.write(\"query_arguments:\" + str(self.request.query_arguments))\n self.write(\"<br>\")\n self.write(\"uri:\" + self.uri)\n self.write(\"<br>\")\n self.write(\"path:\" + self.path)\n self.write(\"<br>\")\n self.write(\"method to call: \" + self.request.method.lower() + \"_\" + self.method)\n self.write(\"<hr>\")\n self.write(\"request method: \" + self.request.method)\n self.write(\"<hr>\")\n self.write(\"request headers: \" + str(self.request.headers))\n self.write(\"<hr>\")\n self.flush()",
"def inner(func):\r\n\r\n service = func.__qualname__.split(\".\")[0]\r\n _Router().add_route(\r\n service=service,\r\n grpc_method=func.__name__,\r\n url_path=url,\r\n http_method=method\r\n )\r\n if pre_request is not None and len(pre_request) > 0:\r\n _MiddlewareManager().add_route_pre_middleware(pre_request, url)\r\n if pos_request is not None and len(pos_request) > 0:\r\n _MiddlewareManager().add_route_pre_middleware(pos_request, url)\r\n return func",
"def simple_handler(request):\n logger.debug('')\n return Response(200, 'OK', {}, '')",
"def IncludeSessionHandler(handler, contextPath=\"/\"):\n context = ContextHandler(contextPath)\n manager = HashSessionManager()\n sessions = SessionHandler(manager)\n\n context.setHandler(sessions)\n sessions.setHandler(handler)\n return context",
"def log_function_invocation(fx):\r\n \r\n def wrapper(self, *args, **kwargs):\r\n logger.debug( \"Entering: \" + fx.__name__ )\r\n r = fx(self, *args, **kwargs)\r\n logger.debug( \"Exited: \" + fx.__name__ )\r\n \r\n return r\r\n return wrapper",
"def test_invoke_processor_wrapper(caplog):\n\n testapp = holocron.Application()\n marker = None\n\n def processor(app, items):\n yield from items\n\n def processor_wrapper(app, items, processor, *, secret):\n nonlocal marker\n marker = secret\n yield from app.invoke([processor])\n\n testapp.add_processor(\"processor\", processor)\n testapp.add_processor_wrapper(\"wrapper\", processor_wrapper)\n\n for _ in testapp.invoke([{\"name\": \"processor\", \"wrapper\": {\"secret\": 42}}]):\n pass\n\n assert marker == 42\n assert len(caplog.records) == 0",
"def add_logger_stdout(app):\n\n f = ContextFilter()\n app.logger.addFilter(f)\n\n stdout_handler = logging.StreamHandler(sys.stdout)\n FORMAT = '%(asctime)s %(hostname)s {0} :%(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'.format('Tester')\n formatter = logging.Formatter(FORMAT, datefmt='%Y-%m-%dT%H:%M:%S')\n stdout_handler.setFormatter(formatter)\n stdout_handler.setLevel(logging.INFO)\n stdout_handler._name = 'StreamHandler'\n app.logger.addHandler(stdout_handler)",
"def set_verbose_logger_handlers():\n # type: (None) -> None\n global _REGISTERED_LOGGER_HANDLERS\n formatter = logging.Formatter(\n '%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d '\n '%(message)s')\n formatter.default_msec_format = '%s.%03d'\n for handler in _REGISTERED_LOGGER_HANDLERS:\n handler.setFormatter(formatter)",
"def prepend_middleware(self, *args):\n for arg in args:\n if arg and arg not in self.list_middleware:\n self.list_middleware.insert(0, arg)\n return self",
"def caplog(_caplog): # noqa: F811\n\n class PropogateHandler(logging.Handler):\n def emit(self, record):\n logging.getLogger(record.name).handle(record)\n\n handler_id = logger.add(PropogateHandler(), format=\"{message} {extra}\")\n from nitpick import PROJECT_NAME\n\n logger.enable(PROJECT_NAME)\n yield _caplog\n logger.remove(handler_id)\n logger.disable(PROJECT_NAME)",
"def dispatch(environ, start_response):\n url_path = environ['PATH_INFO']\n print environ['PATH_INFO']\n if(url_path == '/alarms'):\n content = app.alarms(environ, start_response)\n\treturn content\n if(url_path == '/enodes'):\n content = app.enodeb(environ, start_response)\n return content\n if(url_path == '/perf'):\n content = app.perf(environ, start_response)\n return content\n if(url_path == '/hoa_son'):\n content = app.hoa_son(environ, start_response)\n return content\n if(url_path == '/hoa_w_son'):\n content = app.hoa_w_son(environ, start_response)\n return content\n if(url_path == '/anrs'):\n content = app.ANR(environ, start_response)\n return content\n if(url_path == '/post'):\n content = app.post(environ, start_response)\n return content\n else:\n\tcontent = app.application2(environ,start_response)\n\treturn content",
"def context_formatter(\n full_context: dict,\n *,\n flask_context: dict,\n schema_context: dict,\n model_context: dict,\n):\n sections = [(\"Flask\", flask_context)]\n if schema_context: # pragma: no cover\n sections.append((\"Schemas\", schema_context))\n if model_context: # pragma: no cover\n sections.append((\"Models\", model_context))\n\n additional_context_keys = (\n full_context.keys()\n - flask_context.keys()\n - schema_context.keys()\n - model_context.keys()\n )\n additional_context = {\n key: full_context[key] for key in additional_context_keys\n }\n if additional_context:\n sections.append((\"Additional\", additional_context))\n return \"\\n\".join([format_section(*section) for section in sections])",
"def _debug_wrap(func):\n\n def wrapper(*args, **kwargs):\n _debug_print(f\"{datetime.datetime.now()} - About to run: {func.__name__}\")\n ret_val = func(*args, **kwargs)\n _debug_print(f\"{datetime.datetime.now()} - Completed run: {func.__name__}\")\n return ret_val\n\n return wrapper",
"def benchmark(get_response):\n\n def middleware(request):\n\n # Start timer.\n start = time.time()\n\n # Performs the request\n response = get_response(request)\n\n # Elapsed time.\n delta = int((time.time() - start) * 1000)\n\n # Generate timing message.\n msg = f'time={delta}ms for path={request.path}'\n\n if delta > 1000:\n logger.warning(f\"\\n***\\n*** SLOW: {msg}\\n***\\a\")\n else:\n logger.info(f'{msg}')\n\n return response\n\n return middleware",
"def meta_info(environ, start_response, logger, handle):\n pass",
"def sample_handler(controller, msg, pkt):\n pass",
"def print_trace(view_func):\r\n @wraps(view_func, assigned=available_attrs(view_func))\r\n def _wrapped_view_func(request, *args, **kwargs):\r\n try:\r\n return view_func(request, *args, **kwargs)\r\n except:\r\n import traceback\r\n traceback.print_exc()\r\n return _wrapped_view_func"
] | [
"0.61028963",
"0.5979751",
"0.553887",
"0.5415598",
"0.5203609",
"0.5072327",
"0.5070068",
"0.50015277",
"0.4928767",
"0.4807581",
"0.47827762",
"0.4747526",
"0.46803856",
"0.46597835",
"0.46321198",
"0.46194658",
"0.4610729",
"0.4543063",
"0.4516026",
"0.45135337",
"0.45097077",
"0.45091388",
"0.4435848",
"0.44111866",
"0.43837216",
"0.4375146",
"0.43694988",
"0.43451947",
"0.43430796",
"0.43322003",
"0.43175143",
"0.43044186",
"0.42649424",
"0.4260437",
"0.42559808",
"0.42481774",
"0.42425668",
"0.4238448",
"0.42282224",
"0.42199117",
"0.42158073",
"0.42152354",
"0.4200473",
"0.41812727",
"0.41805857",
"0.4179843",
"0.4176458",
"0.4175146",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41710234",
"0.41643825",
"0.41605",
"0.41566893",
"0.41532692",
"0.41424304",
"0.41423526",
"0.41284752",
"0.4117574",
"0.41050562",
"0.4103503",
"0.40987593",
"0.40967453",
"0.40941054",
"0.40860495",
"0.40800476",
"0.40694508",
"0.40635473",
"0.40574062",
"0.4055942",
"0.40554526",
"0.40540266",
"0.40358666",
"0.403112",
"0.4029666",
"0.4027249",
"0.402704",
"0.40190604",
"0.4003495",
"0.39885202",
"0.39838558",
"0.39805877",
"0.39696082",
"0.39686373",
"0.396639",
"0.3966328"
] | 0.7304824 | 0 |
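The next row's converter round-trips coordinates between decimal degrees and the DMS strings of the open air format ('DP 50:26:22 N 012:17:59 E'); its helper methods make_open_airspace_format and make_kml_format, quoted among the negatives below, spell that arithmetic out inline. A minimal self-contained sketch of the same round-trip, with deg_to_dp and dp_to_deg as illustrative names that do not appear in the original class:

def deg_to_dp(lat, lon):
    """Decimal degrees -> open air 'DP' line, e.g. 'DP 50:26:22 N 012:17:59 E'."""
    def dms(value):
        sec = round(abs(value) * 3600)  # work in whole seconds of arc
        return sec // 3600, (sec % 3600) // 60, sec % 60
    lat_d, lat_m, lat_s = dms(lat)
    lon_d, lon_m, lon_s = dms(lon)
    return 'DP %02d:%02d:%02d %s %03d:%02d:%02d %s' % (
        lat_d, lat_m, lat_s, 'S' if lat < 0 else 'N',
        lon_d, lon_m, lon_s, 'W' if lon < 0 else 'E')

def dp_to_deg(dp_line):
    """Open air 'DP' line -> (lat, lon) in decimal degrees."""
    _, lat_dms, lat_hemi, lon_dms, lon_hemi = dp_line.split()
    def dec(dms, negative):
        d, m, s = (float(x) for x in dms.split(':'))
        value = d + m / 60 + s / 3600
        return -value if negative else value
    return dec(lat_dms, lat_hemi == 'S'), dec(lon_dms, lon_hemi == 'W')

print(deg_to_dp(50.439444, 12.299722))          # DP 50:26:22 N 012:17:59 E
print(dp_to_deg('DP 50:26:22 N 012:17:59 E'))   # (50.43944..., 12.29972...)

Converting via total seconds of arc keeps every field in range by construction; the quoted methods round minutes and seconds separately, which can produce a 60-second field at the rounding edge.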
transforms airspace files from and to open air format kml (google earth) | def __init__(self, full_path_of_source=''):
if len(full_path_of_source) == 0:
full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=["*.txt", "*.kml"])
if full_path_of_source is None:
print('Airspace conversion was aborted by the user')
quit()
# set template (this should not be changed)
self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named "good" and "bad"
self.airspaces = [] # airspace container
self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template
'good_subdivided': {'head':[], 'placemark': [], 'tail': []},
'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}
self.txt_lines = [] # airspace file in open airspace format
self.kml_lines = [] # airspace file in kml format
""" handle conversion from and to KML / airspace format"""
if full_path_of_source.lower().endswith('.kml'):
self.kml_2_open_airspace_and_json_format(full_path_of_source)
if full_path_of_source.lower().endswith('.txt'):
self.open_airspace_format_2_kml(full_path_of_source)
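            # Each branch writes its output next to the source file: a .kml source
            # yields <name>_converted.txt plus a .json summary, while a .txt source
            # yields <name>_converted.kml rendered from the template set above.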
self.plot_all() # works for now only for TXT input | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def open_airspace_format_2_kml(self, source_file_txt):\n # load template for kml file\n self.load_kml_template(self.full_path_kml_template)\n # load airspace source\n self.load_airspace_open_air_format(source_file_txt)\n\n self.kml_lines = self.kml_template['header']\n self.kml_lines.extend(self.kml_template['good_subdivided']['head'])\n # collect all A and B kml lines\n kml_A = []\n kml_B = []\n # transform airspaces and attach to A and B collect-lists\n for airspace in self.airspaces:\n airspace.make_kml_format(self.kml_template)\n if airspace.as_type == 'A':\n kml_A.extend(airspace.kml_lines)\n if airspace.as_type == 'B':\n kml_B.extend(airspace.kml_lines)\n\n self.kml_lines.extend(kml_A)\n self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])\n # start B part\n self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])\n self.kml_lines.extend(kml_B)\n self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])\n\n full_path_kml = source_file_txt[:-4] + '_converted.kml'\n # uisave dialog\n full_path_kml = filesavebox(default=full_path_kml, filetypes=\"*.kml\")\n if full_path_kml is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n # write to file\n f = open(full_path_kml, 'w')\n f.writelines(self.kml_lines)\n f.close()\n print('Resulting KML files was saved to: %s' % full_path_kml)",
"def kml_2_open_airspace_and_json_format(self, full_path):\n # read file\n f = open(full_path,'r')\n kml = f.readlines()\n f.close()\n # find airspaces\n \"\"\"Placemark >\n < name > Bremen - Blumenthal\n Thermikplatte < / name >\n < styleUrl > # inline10</styleUrl>\n < Polygon >\n < tessellate > 1 < / tessellate >\n < outerBoundaryIs >\n < LinearRing >\n < coordinates >\n 8.529121049900063, 53.19549566929423, 0\n 8.52324583919868, 53.21131939607898, 0\n 8.545439298799483, 53.23055800702935, 0\n 8.588991466114615, 53.23047069814625, 0\n 8.575289966189502, 53.20745451706468, 0\n 8.560633120477348, 53.19724609335408, 0\n 8.529121049900063, 53.19549566929423, 0\n < / coordinates >\n \n < / LinearRing >\n < / outerBoundaryIs >\n < / Polygon >\n < / Placemark >\"\"\"\n container = []\n idxLine = 0\n did_not_pass_main_folder = True\n list_of_airspace_types_included = []\n while idxLine < len(kml):\n #print(kml[idxLine])\n #if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:\n # # we have to jump over the first folder\n # print(f'Reading everything inside folder: {kml[idxLine]}')\n # did_not_pass_main_folder = False\n if '<Folder>' in kml[idxLine]: # begin of airspace\n as_type = kml[idxLine+1].replace('\\t','').replace('<name>','').replace('</name>\\n','') # <name>B</name>\n print('Reading AS-types: ' + as_type)\n list_of_airspace_types_included.append(as_type)\n #if not (as_type == 'A' or as_type == 'B'):\n # print('#### Check Folder / Airspace Types, must be \"A\" or \"B\" and try again (current %s)' % as_type)\n # msgbox('Check Folder / Airspace Types, are not \"A\" or \"B\" (current %s). Airspace E will be used for export.' % as_type)\n # as_type = 'E'\n\n if '<Placemark' in kml[idxLine]: # begin of airspace\n container = []\n if '</Placemark' in kml[idxLine]: # end of airspace\n # make sure only Polygons are stored\n for as_line in container:\n if '<Polygon>' in as_line:\n idx_lookAt_start = None\n for idx, line_of_container in enumerate(container):\n if \"<LookAt>\" in line_of_container:\n idx_lookAt_start = idx\n if \"</LookAt>\" in line_of_container:\n idx_lookAt_end = idx\n # Remove lookAt lines if necessary\n if idx_lookAt_start:\n container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part\n # append airspace to airspace list as airspace class\n self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))\n container.append(kml[idxLine])\n idxLine += 1\n print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))\n # summary\n outlines = ['* KML conversion file, rename this line']\n json_dict = {\"circles\": [], \"polygons\": []}\n for airspace in self.airspaces:\n # prepare open-airspace formate\n outlines.append('\\n\\n') # separate airspaces\n outlines.extend(airspace.txt_lines)\n # prepare json\n json_dict['polygons'].append(airspace.json_dict)\n\n # write open airspace format\n target_path = full_path[:-4] + '_converted.txt'\n # uisave dialog\n\n target_path = filesavebox(default=target_path, filetypes=\"*.txt\")\n if target_path is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n f = open(target_path,'w')\n f.writelines(outlines)\n f.close()\n print('Result was written to: %s' % target_path)\n\n # write json:\n target_path_json = target_path[:-4] + '.json'\n\n json_string = json.dumps(json_dict)\n json_file = open(target_path_json, \"w\")\n json_file.write(json_string)\n json_file.close()\n\n # write list of airspace files for index.html for leaflet map\n print('The 
following airspace types have been converted:')\n print(list_of_airspace_types_included)",
"def make_open_airspace_format(self):\n # Extract coordinates from KML\n for idxline in range(len(self.kml_lines)):\n if '<name>' in self.kml_lines[idxline]:\n self.name = self.kml_lines[idxline].replace('\\t', '').replace('<name>', '').replace('</name>', '').replace('\\n','')\n if not self.name.startswith('TS'):\n self.name = 'TS_' + self.name\n print('Type: %s | Name: %s' % (self.as_type, self.name))\n if '<coordinates>' in self.kml_lines[idxline]:\n self.coordinates_kml = self.kml_lines[idxline + 1].replace('\\t', '').replace('\\n', '')\n break\n # start conversion to airspace format\n \"\"\" AC A\n AN TS_Erzgeb\n AL FL98\n AH FL99\n DP 50:26:22 N 012:17:59 E\n DP 50:25:25 N 012:18:26 E\n DP 50:24:40 N 012:19:01 E\n DP 50:24:06 N 012:19:46 E\"\"\"\n\n # AC A\n self.txt_lines.append('AC %s\\n' % self.as_type)\n # AN TS_Erzgeb\n self.txt_lines.append('AN %s\\n' % self.name)\n # heights\n self.txt_lines.append('AL FL98\\n')\n self.txt_lines.append('AH FL99\\n')\n # coordinates\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n # Target format: DP 50:26:22 N 012:17:59 E\n lat_long = coo_pt.split(',')\n # latitude\n latDecAsStr = lat_long[1].split('.')\n #if '.' not in latDecAsStr: # take care of case \"51\" instead of \"51.123456\"\n # latDecAsStr += '.000000'\n lat_degree = abs(int(latDecAsStr[0]))\n #print(f'latDecAsStr {latDecAsStr}')\n if len(latDecAsStr)==1:\n latDecAsStr.append('0')\n lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1\n lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)\n lat_second = round(lat_secondDec*60)\n cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))\n if latDecAsStr[0].startswith('-'):\n cooString += ' S'\n else:\n cooString += ' N'\n # longitude\n #print(f'converting lat_long {lat_long}')\n # take care of case: no decimal sign included, case \"11\" instead of \"11.123456\"\n if '.' not in lat_long[0]:\n lat_long[0] += '.0'\n lonDecAsStr = lat_long[0].split('.')\n lon_degree = abs(int(lonDecAsStr[0]))\n lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1\n lon_minute = round((float('0.' + lonDecAsStr[1]) * 60) - lon_secondDec)\n lon_second = round(lon_secondDec * 60)\n cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))\n if lonDecAsStr[0].startswith('-'):\n cooString += ' W'\n else:\n cooString += ' E'\n cooString += '\\n'\n self.txt_lines.append(cooString)",
"def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result",
"def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)",
"def make_input_data_kmls(rundata):\n \n import os\n from . import topotools, dtopotools\n\n regions2kml(rundata, combined=False)\n gauges2kml(rundata)\n\n topofiles = rundata.topo_data.topofiles\n for f in topofiles:\n topo_file_name = f[-1]\n topo_type = f[0]\n topo2kml(topo_file_name, topo_type)\n \n dtopofiles = rundata.dtopo_data.dtopofiles\n for f in dtopofiles:\n dtopo_file_name = f[-1]\n dtopo_type = f[0]\n dtopo2kml(dtopo_file_name, dtopo_type)",
"def makepkl():\n # Old osgeo.ogr approach\n from osgeo import ogr\n # USTimeZones.kml source is unknown, but was freely available and\n # Has been converted to a pkl file\n kmlpath = os.path.join(os.path.dirname(__file__), 'USTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(uspklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(uspklpath, 'w'))\n\n # WorldTimeZones.kml source is below and was freely available and\n # Has been converted to a pkl file\n # https://productforums.google.com/forum/?fromgroups=#!msg/gec-tools/EdR18tz_5k8/MRPV85OxXIkJ\n kmlpath = os.path.join(os.path.dirname(__file__), 'WorldTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(worldpklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(worldpklpath, 'w'))",
"def regions2kml(rundata=None,fname='regions.kml',verbose=True,combined=True):\n\n from numpy import cos,pi,floor\n\n if rundata is None:\n try:\n import setrun\n reload(setrun)\n rundata = setrun.setrun()\n except:\n raise IOError(\"*** cannot execute setrun file\")\n\n clawdata = rundata.clawdata\n x1,y1 = clawdata.lower[0:]\n x2,y2 = clawdata.upper[0:]\n description = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\\n\" % (f2s(y1),f2s(y2))\n\n mx,my = clawdata.num_cells[0:]\n dx = (x2-x1)/float(mx)\n dx_meters = dx*111e3*cos(pi*0.5*(y1+y2)/180.)\n dy = (y2-y1)/float(my)\n dy_meters = dy*111e3\n if verbose:\n print(\"Domain: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n dx_deg,dx_min,dx_sec = deg2dms(dx)\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n #print \"Level 1 resolution: dx = %g deg, %g min, %g sec = %g meters\" \\\n # % (dx_deg,dx_min,dx_sec,dx_meters)\n levtext = \"Level 1 resolution: dy = %g deg, %g min, %g sec = %g meters\\n\" \\\n % (dy_deg,dy_min,dy_sec,dy_meters)\n if verbose:\n print(levtext)\n description = description + levtext\n\n amr_levels_max = rundata.amrdata.amr_levels_max\n refinement_ratios_y = rundata.amrdata.refinement_ratios_y\n num_ref_ratios = len(refinement_ratios_y)\n if amr_levels_max > num_ref_ratios+1:\n raise IOError(\"*** Too few refinement ratios specified for \" \\\n + \"amr_levels_max = %i\" % amr_levels_max)\n dy_levels = (num_ref_ratios+1) * [dy]\n for k,r in enumerate(refinement_ratios_y):\n level = k+2\n dy = dy_levels[k] / r\n dy_levels[k+1] = dy\n dy_meters = dy*111e3\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n levtext = \"Level %s resolution: dy = %g deg, %g min, %g sec = %g meters (refined by %i)\\n\" \\\n % (level,dy_deg,dy_min,dy_sec,dy_meters,r)\n if verbose:\n print(levtext)\n description = description + levtext\n\n if verbose:\n print(\"Allowing maximum of %i levels\" % amr_levels_max)\n\n elev = 0.\n if not combined:\n fname = 'Domain.kml'\n\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = 'Computational Domain'\n mapping['desc'] = description\n mapping['color'] = \"0000FF\" # red\n mapping['width'] = 2\n\n region_text = kml_region(mapping)\n kml_text = kml_text + region_text\n\n if not combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)\n\n \n\n regions = rundata.regiondata.regions\n if len(regions)==0 and verbose:\n print(\"No regions found in setrun.py\")\n\n\n for rnum,region in enumerate(regions):\n if not combined:\n fname = 'Region_%s.kml' % str(rnum).zfill(2)\n kml_text = kml_header(fname)\n\n minlevel,maxlevel = region[0:2]\n t1,t2 = region[2:4]\n x1,x2,y1,y2 = region[4:]\n\n if verbose:\n print(\"Region %i: %10.6f %10.6f %10.6f %10.6f\" \\\n % (rnum,x1,x2,y1,y2))\n print(\" minlevel = %i, maxlevel = %i\" \\\n % (minlevel,maxlevel) \\\n + \" t1 = %s, t2 = %s\" % (f2s(t1),f2s(t2)))\n mapping = {}\n mapping['minlevel'] = minlevel\n mapping['maxlevel'] = maxlevel\n mapping['t1'] = t1\n mapping['t2'] = t2\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = 'Region %i' % rnum\n description = \"minlevel = %i, maxlevel = %i\\n\" % (minlevel,maxlevel) \\\n + \" t1 = %s, t2 = %s\\n\" % (f2s(t1),f2s(t2)) \\\n + \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\\n\\n\" % 
(f2s(y1),f2s(y2))\n if len(dy_levels) >= minlevel:\n dy = dy_levels[minlevel-1]\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n dy_meters = dy*111e3\n levtext = \"Level %s resolution: \\ndy = %g deg, %g min, %g sec \\n= %g meters\\n\" \\\n % (minlevel,dy_deg,dy_min,dy_sec,dy_meters)\n description = description + levtext\n if (maxlevel > minlevel) and (len(dy_levels) >= maxlevel):\n dy = dy_levels[maxlevel-1]\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n dy_meters = dy*111e3\n levtext = \"\\nLevel %s resolution: \\ndy = %g deg, %g min, %g sec \\n= %g meters\\n\" \\\n % (maxlevel,dy_deg,dy_min,dy_sec,dy_meters)\n description = description + levtext\n mapping['desc'] = description\n mapping['color'] = \"FFFFFF\" # white\n mapping['width'] = 3\n\n region_text = kml_region(mapping)\n kml_text = kml_text + region_text\n if not combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)\n\n if combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def importKML(filepath):\n\tf = open(filepath, 'r')\n\tstr = f.read()\n\treturn etree.fromstring(str)",
"def main():\n input_file_path = sys.argv[1]\n output_file_path = sys.argv[2]\n gps_df = create_df(input_file_path) # creates a data frame\n gps_df = clean_data(gps_df) # cleans the data\n print('Cleaning done')\n write_to_kml(gps_df, output_file_path) # writes to kml file",
"def transform(infile, output, insrs, format_name):\n\n logging.info('Transforming %s from %s to %s' % (infile, insrs, output)) \n in_srs = osr.SpatialReference()\n in_srs.ImportFromEPSG(insrs)\n out_srs = osr.SpatialReference()\n out_srs.ImportFromEPSG(4324)\n coordTrans = osr.CoordinateTransformation(in_srs, out_srs)\n\n in_dsn = ogr.Open(infile)\n in_layer = in_dsn.GetLayer()\n in_feature_definition = in_layer.GetLayerDefn()\n\n out_driver = ogr.GetDriverByName(format_name)\n out_dsn = out_driver.CreateDataSource(output)\n out_layer = out_dsn.CreateLayer(in_layer.GetName(),\n geom_type=in_layer.GetGeomType())\n\n # add fields\n for i in range(0, in_feature_definition.GetFieldCount()):\n fieldDefn = in_feature_definition.GetFieldDefn(i)\n out_layer.CreateField(fieldDefn)\n\n # get the output layer's feature definition\n out_feature_definition = out_layer.GetLayerDefn()\n\n # loop through the input features\n inFeature = in_layer.GetNextFeature()\n while inFeature:\n # get the input geometry\n geom = inFeature.GetGeometryRef().Clone()\n # reproject the geometry\n geom.Transform(coordTrans)\n # create a new feature\n outFeature = ogr.Feature(out_feature_definition)\n # set the geometry and attribute\n outFeature.SetGeometry(geom)\n for i in range(0, out_feature_definition.GetFieldCount()):\n outFeature.SetField(out_feature_definition.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))\n # add the feature to the shapefile\n out_layer.CreateFeature(outFeature)\n # destroy the features and get the next input feature\n outFeature.Destroy()\n inFeature.Destroy()\n inFeature = in_layer.GetNextFeature()\n\n # close the shapefiles\n in_dsn.Destroy()\n out_dsn.Destroy()",
"def topo2kml(topo_file_name, topo_type, color='00FF00'):\n\n import os\n from clawpack.geoclaw import topotools\n topo = topotools.Topography(topo_file_name, topo_type=topo_type)\n topo.read_header()\n xy = topo.extent\n name = os.path.splitext(os.path.split(topo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def dtopo2kml(dtopo_file_name, dtopo_type, color='8888FF'):\n\n import os\n from clawpack.geoclaw import dtopotools\n dtopo = dtopotools.DTopography()\n dtopo.read(dtopo_file_name, dtopo_type)\n x1 = dtopo.x.min()\n x2 = dtopo.x.max()\n y1 = dtopo.y.min()\n y2 = dtopo.y.max()\n xy = (x1,x2,y1,y2)\n name = os.path.splitext(os.path.split(dtopo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def line2kml(xy,fname='line.kml',name='line',color='00FFFF',width=3,\n verbose=True):\n \n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Line: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_line(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def export_kmz(self):\n self.export_kml(kmz=True)",
"def tdump2kml(inputDir):\n # Check inputdir\n if not os.path.exists(inputDir):\n print(\"Entered directory is invalid.\")\n sys.exit()\n\n os.chdir(inputDir)\n\n # Main loop\n for run in os.walk('.').next()[1]:\n\n os.chdir(run)\n\n # Filter tdump files\n files = glob.glob(\"*.tdump\")\n\n # Conversion\n for entry in files:\n p = subprocess.Popen(\"C:\\\\hysplit4\\\\exec\\\\trajplot.exe -i%s -o%s.ps -a3 -v1 -l1\" % \\\n (entry, entry), shell=True, stdout=subprocess.PIPE)\n p.wait()\n os.remove(entry[:-6])\n #p_out = p.communicate()\n #print p_out[0], p_out[1]\n\n # Move all kmls into dir kmls\n #sys.stdout.flush()\n kmls = glob.glob(\"*.kml\")\n\n if not os.path.exists(\"kmls\"):\n os.makedirs(\"kmls\")\n\n for kml in kmls:\n os.rename(kml, \"kmls\\\\%s\" % kml)\n\n # Remove redundant ps files\n pss = glob.glob(\"*.ps\")\n\n for ps in pss:\n os.remove(ps)\n\n print \"DONE : %s %s\\kmls\" % (run, os.getcwd())\n os.chdir('../')",
"def kml(cls, user, logs, kml, kml_doc):\n # KML Compliant Datetime Formatter\n kml_datetime_format = \"%Y-%m-%dT%H:%M:%S.%fZ\"\n icon = 'http://maps.google.com/mapfiles/kml/shapes/airports.png'\n threshold = 1 # Degrees\n\n kml_folder = kml.newfolder(name=user.username)\n\n flights = TakeoffOrLandingEvent.flights(user)\n if len(flights) == 0:\n return\n\n logs = filter(lambda log: cls._is_bad_position(log, threshold), logs)\n for i, flight in enumerate(flights):\n label = 'Flight {}'.format(i + 1) # Flights are one-indexed\n kml_flight = kml_folder.newfolder(name=label)\n\n flight_logs = filter(lambda x: flight.within(x.timestamp), logs)\n if len(flight_logs) < 2:\n continue\n\n coords = []\n angles = []\n when = []\n for entry in flight_logs:\n pos = entry.uas_position.gps_position\n # Spatial Coordinates\n coord = (pos.longitude, pos.latitude,\n units.feet_to_meters(entry.uas_position.altitude_msl))\n coords.append(coord)\n\n # Time Elements\n time = entry.timestamp.strftime(kml_datetime_format)\n when.append(time)\n\n # Degrees heading, tilt, and roll\n angle = (entry.uas_heading, 0.0, 0.0)\n angles.append(angle)\n\n # Create a new track in the folder\n trk = kml_flight.newgxtrack(name='Flight Path')\n trk.altitudemode = AltitudeMode.absolute\n\n # Append flight data\n trk.newwhen(when)\n trk.newgxcoord(coords)\n trk.newgxangle(angles)\n\n # Set styling\n trk.extrude = 1 # Extend path to ground\n trk.style.linestyle.width = 2\n trk.style.linestyle.color = Color.blue\n trk.iconstyle.icon.href = icon\n\n for obstacle in MovingObstacle.objects.all():\n obstacle.kml(path=flight_logs, kml=kml_flight, kml_doc=kml_doc)",
"def export_kml(self, kmz=False):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n if kmz:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kmz\",\n filetypes=((\"keyhole markup language\", \"*.kmz\"),\n (\"All Files\", \"*.*\")))\n else:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kml\",\n filetypes=((\"keyhole markup language\", \"*.kml\"),\n (\"All Files\", \"*.*\")))\n if outputfile:\n self.tabs.window.aistracker.create_kml_map(\n outputfile, kmzoutput=kmz, orderby=orderby,\n region=currentregion)\n else:\n raise ExportAborted('Export cancelled by user.')",
"def read_kml():\n global kmldata\n global CONFIG\n if type(kmldata) == type(None):\n if not os.path.exists(CONFIG[\"kmlfile\"]):\n fiona.drvsupport.supported_drivers['KML'] = 'rw'\n kmldata = geopandas.read_file(CONFIG[\"kmlrepo\"], driver=\"KML\")\n os.makedirs(CONFIG[\"cachedir\"],exist_ok=True)\n with open(CONFIG[\"kmlfile\"], \"wb\") as fh:\n pickle.dump(kmldata,fh)\n else:\n with open(CONFIG[\"kmlfile\"], \"rb\") as fh:\n kmldata = pickle.load(fh)\n return kmldata",
"def get_kml_object(filename: str) -> fastkml.kml.KML:\n\t\n\tkml_obj = fastkml.kml.KML()\n\t\n\twith open(filename) as file:\n\t\tkml_obj.from_string(file.read().encode(\"utf-8\"))\n\t\n\treturn kml_obj",
"def run(self,\n altitude: float,\n day_of_year: float,\n local_time: float,\n latitude: float,\n longitude: float,\n f107: float,\n f107m: float,\n kp1: float,\n kp2: float,\n get_uncertainty: bool = False\n ):\n\n output_file = tempfile.NamedTemporaryFile(\n delete=False, suffix=\".out\", prefix=\"swami_\", mode=\"r+\")\n\n data_dtm = str(self.path_to_data)\n data_dtm = data_dtm + \"/\" if data_dtm[-1] != \"/\" else data_dtm\n data_um = str(os.path.join(self.path_to_data, \"um\"))\n data_um = data_um + \"/\" if data_um[-1] != \"/\" else data_um\n\n is_mcm = True if self.model is _AtmModel.MCM else False\n is_dtm = True if self.model is _AtmModel.DTM2020 else False\n is_um = True if self.model is _AtmModel.UM else False\n\n input_dict = {\n \"altitude\": float(altitude),\n \"day_of_year\": float(day_of_year),\n \"local_time\": float(local_time),\n \"latitude\": float(latitude),\n \"longitude\": float(longitude),\n \"f107\": float(f107),\n \"f107m\": float(f107m),\n \"kp1\": float(kp1),\n \"kp2\": float(kp2),\n \"bMCM\": is_mcm,\n \"bDTM\": is_dtm,\n \"bUM\": is_um,\n \"bUMstd\": bool(get_uncertainty), # and is_um,\n \"bDTMunc\": bool(get_uncertainty), # and is_dtm,\n \"data_dtm\": data_dtm,\n \"data_um\": data_um,\n \"output_file\": str(output_file.name)\n }\n\n input_file = self._generate_nml_from_dict(input_dict)\n\n cmd = [str(self.path_to_bin), input_file]\n\n proc = subprocess.run(cmd, check=True)\n\n out = self._read_output_file(output_file.name)\n out[\"_input\"] = input_dict\n\n os.unlink(input_file)\n os.unlink(output_file.name)\n\n return out",
"def main():\n #short GPS Test\n filename = 'KML_short_test.kml'\n gps_filename = 'gps_short_test.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()\n\n #Repeat test\n filename = 'KML_repeat_test1.kml'\n gps_filename = 'gps_1.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()\n\n filename = 'KML_repeat_test2.kml'\n gps_filename = 'gps_1.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()",
"def kml_file_to_open511_element(filename):\n ds = DataSource(filename)\n base_element = get_base_open511_element(lang='fr')\n for layer in ds:\n for feature in layer:\n base_element.append(feature_to_open511_element(feature))\n return base_element",
"def read_szx_fmv_11(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n mphr = eps_file.mphr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = raw_data[\"LONGITUDE\"].size\n\n data = {}\n metadata = {}\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n metadata[\"spacecraft_id\"] = np.int8(mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(mphr[\"ORBIT_START\"])\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n\n for f in fields:\n metadata[f] = np.int16(mphr[f.upper()])\n\n fields = [\"sat_track_azi\"]\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan), (\"latitude\", long_nan),\n (\"swath_indicator\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = nan_val\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"f_kp\", byte_nan), (\"f_usable\", byte_nan), (\"f_f\", uint_nan),\n (\"f_v\", uint_nan), (\"f_oa\", uint_nan), (\"f_sa\", uint_nan),\n (\"f_tel\", uint_nan), (\"f_land\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n # modify longitudes from (0, 360) to (-180,180)\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1),\n n_lines).astype(np.uint8)\n data[\"line_num\"] = idx_nodes.astype(np.uint16)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n return data, metadata",
"def k2lc(epic):\n prefix = epic[:4]\n id = epic[4:]\n c = \"01\"\n path = \"data/c01/{0}00000/{1}\".format(prefix, id)\n end = \"kepler_v1.0_lc.fits\"\n file = \"{0}/hlsp_everest_k2_llc_{1}-c{2}_{3}\".format(path, epic, c, end)\n x, y = process_data(file)\n return x, y",
"def convert(input_filename, output_filename):\n c_file = pkg_resources.resource_filename('ShapelyChipDesigns', 'convert.rb')\n os.system('klayout -z -rd input='+input_filename+' -rd output='+output_filename+' -r '+c_file)",
"def poly2kml(xy,fname=None,name='poly',color='00FF00', width=3,\n verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n x,y = xy\n\n if verbose:\n print(\"Polygon: %10.6f %10.6f\" % (x[0],y[0]))\n for j in range(1,len(x)):\n print(\" %10.6f %10.6f\" % (x[j],y[j]))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x'] = x\n mapping['y'] = y\n mapping['elev'] = elev\n mapping['name'] = name\n d = \" x[0] = %s, y[0] = %s\\n\" % (x[0],y[0]) \n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n mapping['desc'] = d\n mapping['color'] = color\n mapping['width'] = width\n\n v = \"\\n\"\n for j in range(len(x)):\n v = v + \"%s,%s,%s\\n\" % (f2s(x[j]),f2s(y[j]),f2s(elev))\n v = v + \"%s,%s,%s\\n\" % (f2s(x[0]),f2s(y[0]),f2s(elev))\n v.replace(' ','')\n \n region_text = kml_region(mapping, v)\n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def astrometry_script(filename, catalog=\"PS\", rotation_scaling=True, xy_transformation=True, fine_transformation=True, images=False, vignette=3,vignette_rectangular=1., cutouts=None, ra=None, dec=None, projection_ra=None, projection_dec=None, verbose=False, save_images=False, ignore_header_rot=False, radius=-1., save_bad_result=False, silent=False, sigma_threshold_for_source_detection=5, high_res = False, hdul_idx=0, filename_for_sources=None, FWHM=4):\n #print(\"Program version: 1.2\")\n\n report = {}\n if(images):\n plt.ioff()\n warnings.simplefilter('ignore', UserWarning)\n fits_image_filename = filename\n\n print(\"> Astrometry for {} \".format(fits_image_filename))\n\n with fits.open(fits_image_filename) as hdul:\n #print(hdul.info())\n #print(hdul[0].header)\n\n hdu = hdul[hdul_idx]\n #hdu.verify('fix')\n hdr = hdu.header\n\n\n image_or = hdul[hdul_idx].data.astype(float)\n median = np.nanmedian(image_or)\n image_or[np.isnan(image_or)]=median\n image = image_or - median\n\n observation = find_sources(image, vignette,vignette_rectangular,cutouts, sigma_threshold_for_source_detection, FWHM=FWHM)\n #print(observation)\n\n #changed order of positions to [(x,y), (x,y),...] for compatibility with photutils 1.4\n xcenters = np.array(observation['xcenter'])\n ycenters = np.array(observation['ycenter'])\n positions = [(xcenters[i], ycenters[i]) for i in range(len(xcenters))]\n apertures = CircularAperture(positions, r=4.)\n\n\n #world coordinates\n if(not silent):\n print(\">Info found in the file -- (CRVAl: position of central pixel (CRPIX) on the sky)\")\n print(WCS(hdr))\n\n hdr[\"NAXIS1\"] = image.shape[0]\n hdr[\"NAXIS2\"] = image.shape[1]\n\n #wcsprm = Wcsprm(hdr.tostring().encode('utf-8')) #everything else gave me errors with python 3, seemed to make problems with pc conversios, so i wwitched to the form below\n wcsprm = WCS(hdr).wcs\n wcsprm_original = WCS(hdr).wcs\n wcsprm, fov_radius, INCREASE_FOV_FLAG, PIXSCALE_UNCLEAR = read_additional_info_from_header(wcsprm, hdr, ra, dec,projection_ra, projection_dec, ignore_header_rot, radius)\n if(verbose):\n print(WCS(wcsprm.to_header()))\n coord = SkyCoord(wcsprm.crval[0], wcsprm.crval[1], unit=(u.deg, u.deg), frame=\"icrs\")\n if(not PIXSCALE_UNCLEAR):\n if(wcsprm.crpix[0] < 0 or wcsprm.crpix[1] < 0 or wcsprm.crpix[0] > image.shape[0] or wcsprm.crpix[1] > image.shape[1] ):\n if(not silent):\n print(\"central value outside of the image, moving it to the center\")\n coord_radec = wcsprm.p2s([[image.shape[0]/2, image.shape[1]/2]], 0)[\"world\"][0]\n coord = SkyCoord(coord_radec[0], coord_radec[1], unit=(u.deg, u.deg), frame=\"icrs\")\n #print(wcsprm)\n\n\n\n #better: put in nice wrapper! with repeated tries and maybe try synchron!\n if(not silent):\n print(\">Dowloading catalog data\")\n radius = u.Quantity(fov_radius, u.arcmin)#will prob need more\n catalog_data = query.get_data(coord, radius, catalog)\n report[\"catalog\"] = catalog\n #reference = reference.query(\"mag <20\")\n \n\n if(catalog == \"GAIA\" and catalog_data.shape[0] < 5):\n if(not silent):\n print(\"GAIA seems to not have enough objects, will enhance with PS1\")\n catalog_data2 = query.get_data(coord, radius, \"PS\")\n report[\"catalog\"] = \"PS\"\n catalog_data = pd.concat([catalog_data, catalog_data2])\n #apertures_catalog = CircularAperture(wcs.wcs_world2pix(catalog_data[[\"ra\", \"dec\"]], 1), r=5.)\n if(not silent):\n print(\"Now we have a total of {} sources. 
Keep in mind that there might be duplicates now since we combined 2 catalogs\".format(catalog_data.shape[0]))\n elif(catalog == \"PS\" and (catalog_data is None or catalog_data.shape[0] < 5)):\n if(not silent):\n print(\"We seem to be outside the PS footprint, enhance with GAIA data\")\n catalog_data2 = query.get_data(coord, radius, \"GAIA\")\n report[\"catalog\"] = \"GAIA\"\n catalog_data = pd.concat([catalog_data, catalog_data2])\n #apertures_catalog = CircularAperture(wcs.wcs_world2pix(catalog_data[[\"ra\", \"dec\"]], 1), r=5.)\n if(not silent):\n print(\"Now we have a total of {} sources. Keep in mind that there might be duplicates now since we combined 2 catalogs\".format(catalog_data.shape[0]))\n\n max_sources = 400\n if(INCREASE_FOV_FLAG):\n max_sources= max_sources*2.25 #1.5 times the radius, so 2.25 the area\n if(catalog_data.shape[0]>max_sources):\n catalog_data = catalog_data.nsmallest(400, \"mag\")\n #remove duplicates in catalog?\n\n apertures_catalog = CircularAperture(wcsprm.s2p(catalog_data[[\"ra\", \"dec\"]], 1)['pixcrd'], r=5.)\n #plotting what we have, I keep it in the detector field, world coordinates are more painfull to plot\n if(images):\n fig = plt.figure()\n fig.canvas.manager.set_window_title('Input for {}'.format(fits_image_filename))\n plt.xlabel(\"pixel x direction\")\n plt.ylabel(\"pixel y direction\")\n plt.title(\"Input - red: catalog sources, blue: detected sources in img\")\n plt.imshow(image,cmap='Greys', origin='lower', norm=LogNorm())\n apertures.plot(color='blue', lw=1.5, alpha=0.5)\n apertures_catalog.plot(color='red', lw=1.5, alpha=0.5)\n\n plt.xlim(-200,image.shape[0]+200)\n plt.ylim(-200,image.shape[1]+200)\n if(save_images):\n name_parts = fits_image_filename.rsplit('.', 1)\n plt.savefig(name_parts[0]+\"_image_before.pdf\")\n\n ###tranforming to match the sources\n if(not silent):\n print(\"---------------------------------\")\n print(\">Finding the transformation\")\n if(rotation_scaling):\n if(not silent):\n print(\"Finding scaling and rotation\")\n wcsprm = register.get_scaling_and_rotation(observation, catalog_data, wcsprm, scale_guessed=PIXSCALE_UNCLEAR, verbose=verbose)\n if(xy_transformation):\n if(not silent):\n print(\"Finding offset\")\n wcsprm,_,_ = register.offset_with_orientation(observation, catalog_data, wcsprm, fast=False , INCREASE_FOV_FLAG=INCREASE_FOV_FLAG, verbose= verbose, silent=silent)\n\n #correct subpixel error\n compare_threshold = 3\n if(high_res):\n compare_threshold = 100\n obs_x, obs_y, cat_x, cat_y, distances = register.find_matches(observation, catalog_data, wcsprm, threshold=compare_threshold)#3\n if (len(distances) == 0): #meaning the list is empty\n best_score = 0\n else:\n rms = np.sqrt(np.mean(np.square(distances)))\n best_score = len(obs_x)/(rms+10) #start with current best score\n fine_transformation_success = False\n if(fine_transformation):\n print(\"Finding scaling and rotation\")\n lis = [2,3,5,8,10,6,4, 20,2,1,0.5]\n if(high_res):\n lis = [200,300,100,150,80,40,70, 20, 100, 30,9,5]\n skip_rot_scale = True\n for i in lis:\n wcsprm_new, score = register.fine_transformation(observation, catalog_data, wcsprm, threshold=i, compare_threshold=compare_threshold, skip_rot_scale=skip_rot_scale)\n if(i == 20):\n #only allow rot and scaling for the last few tries\n skip_rot_scale = False\n if(score> best_score):\n wcsprm = wcsprm_new\n best_score = score\n fine_transformation_success = True\n if not fine_transformation_success:\n if(not silent):\n print(\"Fine transformation did not improve result so will be 
discarded.\")\n else:\n if(not silent):\n print(\"Fine transformation applied to improve result\")\n #register.calculate_rms(observation, catalog_data,wcs)\n\n #make wcsprim more physical by moving scaling to cdelt, out of the pc matrix\n wcs =WCS(wcsprm.to_header())\n if(verbose):\n print(wcs)\n from astropy.wcs import utils\n scales = utils.proj_plane_pixel_scales(wcs)\n #print(scales)\n cdelt = wcsprm.get_cdelt()\n #print(cdelt)\n scale_ratio = scales/cdelt\n #print(scale_ratio)\n pc = np.array(wcsprm.get_pc())\n pc[0,0] = pc[0,0]/scale_ratio[0]\n pc[1,0] = pc[1,0]/scale_ratio[1]\n pc[0,1] = pc[0,1]/scale_ratio[0]\n pc[1,1] = pc[1,1]/scale_ratio[1]\n wcsprm.pc = pc\n wcsprm.cdelt = scales\n\n #WCS difference before and after\n if(not silent):\n print(\"> Compared to the input the Wcs was changed by: \")\n scales_original = utils.proj_plane_pixel_scales(WCS(hdr))\n if(not silent):\n print(\"WCS got scaled by {} in x direction and {} in y direction\".format(scales[0]/scales_original[0], scales[1]/scales_original[1]))\n #sources:\n #https://math.stackexchange.com/questions/2113634/comparing-two-rotation-matrices\n #https://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python/13849249#13849249\n def unit_vector(vector):\n \"\"\" Returns the unit vector of the vector. \"\"\"\n return vector / max(np.linalg.norm(vector), 1e-10)\n def matrix_angle( B, A ):\n \"\"\" comment cos between vectors or matrices \"\"\"\n Aflat = A.reshape(-1)\n Aflat = unit_vector(Aflat)\n Bflat = B.reshape(-1)\n Bflat = unit_vector(Bflat)\n #return np.arccos((np.dot( Aflat, Bflat ) / max( np.linalg.norm(Aflat) * np.linalg.norm(Bflat), 1e-10 )))\n return np.arccos(np.clip(np.dot(Aflat, Bflat), -1.0, 1.0))\n #print(matrix_angle(wcsprm.get_pc(), wcsprm_original.get_pc()) /2/np.pi*360)\n #bugfix: multiplying by cdelt otherwise the calculated angle is off by a tiny bit\n rotation_angle = matrix_angle(wcsprm.get_pc()@wcsprm.get_cdelt(), wcsprm_original.get_pc()@wcsprm_original.get_cdelt()) /2./np.pi*360.\n if((wcsprm.get_pc() @ wcsprm_original.get_pc() )[0,1] > 0):\n text = \"counterclockwise\"\n else:\n text = \"clockwise\"\n if(not silent):\n print(\"Rotation of WCS by an angle of {} deg \".format(rotation_angle)+text)\n old_central_pixel = wcsprm_original.s2p([wcsprm.crval], 0)[\"pixcrd\"][0]\n if(not silent):\n print(\"x offset: {} px, y offset: {} px \".format(wcsprm.crpix[0]- old_central_pixel[0], wcsprm.crpix[1]- old_central_pixel[1]))\n\n\n #check final figure\n if(images):\n fig = plt.figure()\n fig.canvas.manager.set_window_title('Result for {}'.format(fits_image_filename))\n plt.xlabel(\"pixel x direction\")\n plt.ylabel(\"pixel y direction\")\n plt.title(\"Result - red: catalog sources, blue: detected sources in img\")\n plt.imshow(image,cmap='Greys', origin='lower', norm=LogNorm())\n apertures.plot(color='blue', lw=1.5, alpha=0.5)\n #apertures_catalog = CircularAperture(wcs.wcs_world2pix(catalog_data[[\"ra\", \"dec\"]], 1), r=5.)\n apertures_catalog = CircularAperture(wcsprm.s2p(catalog_data[[\"ra\", \"dec\"]], 1)['pixcrd'], r=5.)\n\n apertures_catalog.plot(color='red', lw=1.5, alpha=0.5)\n if(save_images):\n name_parts = fits_image_filename.rsplit('.', 1)\n plt.savefig(name_parts[0]+\"_image_after.pdf\")\n if(not silent):\n print(\"--- Evaluate how good the transformation is ----\")\n dic_rms = register.calculate_rms(observation, catalog_data,wcsprm)\n #updating file\n converged = determine_if_fit_converged(dic_rms, catalog_data, observation, wcsprm, image.shape[0], 
image.shape[1], silent)\n report[\"converged\"] = converged\n report[\"matches\"] = dic_rms[\"matches\"]\n report[\"match_radius\"] = dic_rms[\"radius_px\"]\n if(converged or save_bad_result):\n write_wcs_to_hdr(fits_image_filename, wcsprm, report, hdul_idx=hdul_idx)\n if(filename_for_sources != None):\n wcs =WCS(wcsprm.to_header())\n observation_on_sky = wcs.wcs_pix2world(observation[[\"xcenter\",\"ycenter\"]], 1)\n #catalog_from_obs = np.zeros(observation_on_sky.shape[0], dtype={'names':('ra', 'dec', 'aperture_sum'),'formats':('f8', 'f8', 'f8')})\n catalog_from_obs = pd.DataFrame()\n catalog_from_obs[\"ra\"]= observation_on_sky[:,0]\n catalog_from_obs[\"dec\"]= observation_on_sky[:,1]\n catalog_from_obs[\"aperture_sum\"]= observation[\"aperture_sum\"]\n catalog_from_obs[\"mag\"]= -1.* observation[\"aperture_sum\"]#this is fine since we only use the mag to order the sources!\n catalog_from_obs.to_csv(filename_for_sources+\".csv\")\n if(images):\n plt.show()\n\n return converged, dic_rms #dictionary with short info about fit, \"matches\" gives a number of objects matched within certain radius",
"def image2kml(self,varname,filename=None):\n\n vdata=self.get_array(varname)\n im=self.get_image(vdata)\n if filename is None:\n filename='%s.png' % varname\n f=open(filename,'w')\n f.write(im)\n f.close()\n d=self.get_kml_dict(varname,filename)\n pylab.close('all')\n return self.__class__.kmlimage % d",
"def test_convert_csv_to_kml(self):\n import tempfile\n from pykml.util import convert_csv_to_kml\n\n # create a CSV file for testing\n csvfile = tempfile.TemporaryFile(mode='w+')\n csvfile.write('name,snippet,lat,lon\\n')\n csvfile.write('first,The first one,45.0,-90.0\\n')\n csvfile.write('second,The second one,46.0,-89.0\\n')\n csvfile.write('third,\"The third one (with quotes)\",45.0,-88.0\\n')\n csvfile.seek(0)\n\n kmlobj = convert_csv_to_kml(csvfile)\n csvfile.close()\n\n target = etree.fromstring(\n '<kml '\n 'xmlns:atom=\"http://www.w3.org/2005/Atom\" '\n 'xmlns:gx=\"http://www.google.com/kml/ext/2.2\" '\n 'xmlns=\"http://www.opengis.net/kml/2.2\">'\n '<Document>'\n '<Folder>'\n '<name>KmlFile</name>'\n '<Placemark>'\n '<name>first</name>'\n '<Snippet maxLines=\"2\">The first one</Snippet>'\n '<description>'\n '<![CDATA['\n '<table border=\"1\"'\n '<tr><th>name</th><td>first</td></tr>'\n '<tr><th>snippet</th><td>The first one</td></tr>'\n '<tr><th>lat</th><td>45.0</td></tr>'\n '<tr><th>lon</th><td>-90.0</td></tr>'\n '</table>'\n ']]>'\n '</description>'\n '<Point>'\n '<coordinates>-90.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>second</name>'\n '<Snippet maxLines=\"2\">The second one</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>second</td></tr><tr><th>snippet</th><td>The second one</td></tr><tr><th>lat</th><td>46.0</td></tr><tr><th>lon</th><td>-89.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-89.0,46.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>third</name>'\n '<Snippet maxLines=\"2\">The third one (with quotes)</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>third</td></tr><tr><th>snippet</th><td>The third one (with quotes)</td></tr><tr><th>lat</th><td>45.0</td></tr><tr><th>lon</th><td>-88.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-88.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '</Folder>'\n '</Document>'\n '</kml>'\n )\n self.assertTrue(compare_xml(target, kmlobj))",
"def _gtTSmap(self):\n if os.path.isfile(self.outtsmap):\n # Already exists\n return\n\n if self.csys == 'GAL':\n center_icrs = SkyCoord(ra=self.ra*u.degree, dec=self.dec*u.degree, frame='icrs')\n self.ra = center_icrs.galactic.l.deg\n self.dec = center_icrs.galactic.b.deg\n\n model = os.path.join(self.workpath, 'TSmapModel.xml') \n rfil = open(self.outmodel, 'r')\n wfil = open(model, 'w')\n isSrc = False\n isDif = False\n for line in rfil:\n if (isSrc) and ('<source name' in line):\n # Arrived to a new source, restart copying\n isSrc = False\n if (isDif) and ('<source name' in line) and ('PointSource' in line):\n isDif = False\n if 'TARGET' in line:\n isSrc = True\n if ('<source name=\"gll_iem_v06\"' in line) or ('<source name=\"iso_source_v06\"' in line): \n isDif = True\n \n if isSrc:\n # Do not copy the Target model to make it appear in the TS map\n pass\n else:\n if isDif:\n # Leave Diffuse model normalizations free\n wfil.write(line)\n else:\n # Make sur the gtlike output source model has all source parameters fixed\n wfil.write(line.replace('free=\"1\"', 'free=\"0\"'))\n rfil.close()\n wfil.close()\n\n # Launch the gttsmap tool \n if self.mode == 'binned':\n os.popen(\"gttsmap evfile={} scfile={} bexpmap={} expcube={} cmap={} srcmdl={}\\\n outfile={} evtype={} irfs=CALDB optimizer=NewMinuit statistic=BINNED ftol=1e-2\\\n coordsys={} proj=AIT nxpix={} nypix={} binsz={} xref={} yref={}\".format(self.outmktime,\n self.ft2, self.outbinexp, self.outltcube, self.outbincub, model, self.outtsmap, self.evtype,\n self.csys, self.imwid, self.imwid, self.binsz, self.ra, self.dec))\n elif self.mode == 'unbinned':\n os.popen(\"gttsmap evfile={} scfile={} expmap={} expcube={} srcmdl={}\\\n outfile={} evtype={} irfs=CALDB optimizer=NewMinuit statistic=UNBINNED ftol=1e-2\\\n coordsys={} proj=AIT nxpix={} nypix={} binsz={} xref={} yref={}\".format(self.outmktime,\n self.ft2, self.outexpmap, self.outltcube, model, self.outtsmap, self.evtype,\n self.csys, self.imwid, self.imwid, self.binsz, self.ra, self.dec))\n else:\n return\n\n if self.csys == 'GAL':\n self.ra = center_icrs.ra.deg\n self.dec = center_icrs.dec.deg\n return",
"def read_smx_fmv_12(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = eps_file.mdr_counter * n_node_per_line\n idx_nodes = np.arange(eps_file.mdr_counter).repeat(n_node_per_line)\n\n data = {}\n metadata = {}\n\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"f_land\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n fields = [\"sat_track_azi\", \"abs_line_number\"]\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan, long_nan),\n (\"latitude\", long_nan, long_nan),\n (\"swath_indicator\", byte_nan, byte_nan),\n (\"soil_moisture\", uint_nan, uint_nan),\n (\"soil_moisture_error\", uint_nan, uint_nan),\n (\"sigma40\", long_nan, long_nan),\n (\"sigma40_error\", long_nan, long_nan),\n (\"slope40\", long_nan, long_nan),\n (\"slope40_error\", long_nan, long_nan),\n (\"dry_backscatter\", long_nan, long_nan),\n (\"wet_backscatter\", long_nan, long_nan),\n (\"mean_surf_soil_moisture\", uint_nan, uint_nan),\n (\"soil_moisture_sensetivity\", ulong_nan, float32_nan),\n (\"correction_flags\", uint8_nan, uint8_nan),\n (\"processing_flags\", uint8_nan, uint8_nan),\n (\"aggregated_quality_flag\", uint8_nan, uint8_nan),\n (\"snow_cover_probability\", uint8_nan, uint8_nan),\n (\"frozen_soil_probability\", uint8_nan, uint8_nan),\n (\"innudation_or_wetland\", uint8_nan, uint8_nan),\n (\"topographical_complexity\", uint8_nan, uint8_nan)]\n\n for f, nan_val, new_nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = new_nan_val\n\n # sat_track_azi (uint)\n data[\"as_des_pass\"] = \\\n np.array(raw_data[\"SAT_TRACK_AZI\"].flatten()[idx_nodes] < 270)\n\n # modify longitudes from [0,360] to [-180,180]\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n fields = [\"param_db_version\", \"warp_nrt_version\"]\n for f in fields:\n data[f] = raw_data[\"PARAM_DB_VERSION\"].flatten()[idx_nodes]\n\n metadata[\"spacecraft_id\"] = int(eps_file.mphr[\"SPACECRAFT_ID\"][2])\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1), n_lines)\n\n data[\"line_num\"] = idx_nodes\n\n return data, metadata",
"def eficas_translation(ts_file, new_ts_file, lang):\n dicoCataToLabel={}\n dicoCataToTelemac={}\n header = '<?xml version=\"1.0\" encoding=\"utf-8\"?>'\n header +='<!DOCTYPE TS><TS version=\"1.1\" language=\"'+lang+'\">'\n header +='<context>\\n'\n header +=' <name>@deafult</name>\\n'\n\n end ='</context>\\n</TS>\\n'\n\n pattern_In=re.compile(r'^\\s*<source>(?P<ident>.*)</source>\\s*$')\n pattern_Out=re.compile(r'^\\s*<translation>(?P<traduit>.*)</translation>\\s*$')\n pattern_In2=re.compile(r'^\\s*<source2>(?P<ident>.*)</source2>\\s*$')\n pattern_Out2=re.compile(r'^\\s*<translation2>(?P<traduit>.*)</translation2>\\s*$')\n listeMaj=[]\n listeMaj.append(('for h','for H'))\n listeMaj.append(('pour h','pour H'))\n listeMaj.append(('for u','for U'))\n listeMaj.append(('pour u','pour U'))\n listeMaj.append(('of k','of K'))\n listeMaj.append(('de k','de K'))\n listeMaj.append(('of h','of H'))\n listeMaj.append(('de h','de H'))\n listeMaj.append(('u and v','U and V'))\n listeMaj.append(('u et v','U et V'))\n listeMaj.append(('on h','on H'))\n listeMaj.append(('sur h','sur H'))\n listeMaj.append(('supg','SUPG'))\n listeMaj.append(('k and epsilon','K and Epsilon'))\n listeMaj.append(('k-epsilon','K-Epsilon'))\n listeMaj.append(('gmres','GMRES'))\n listeMaj.append(('cgstab','CGSTAB'))\n listeMaj.append(('q(z)','Q(Z)'))\n listeMaj.append(('z(q)','Z(Q)'))\n listeMaj.append(('wgs84','WGS84'))\n listeMaj.append(('wgs84','UTM'))\n listeMaj.append(('n-scheme','N-Scheme'))\n listeMaj.append(('scheme n','Scheme N'))\n listeMaj.append(('psi-scheme','PSI-Scheme'))\n listeMaj.append((' psi',' PSI'))\n listeMaj.append(('f(t90)','F(T90)'))\n listeMaj.append(('(pa)','(Pa)'))\n listeMaj.append(('h clipping','H clipping'))\n listeMaj.append(('delwaq','DELWAQ'))\n listeMaj.append(('tomawac','TOMAWAC'))\n listeMaj.append(('chezy','CHEZY'))\n listeMaj.append(('hllc','HLLC'))\n listeMaj.append(('c-u','C-U'))\n listeMaj.append(('c,u,v','C,U,V'))\n listeMaj.append(('h,u,v','H,U,V'))\n listeMaj.append(('previmer','PREVIMER'))\n listeMaj.append(('fes20xx','FES20XX'))\n listeMaj.append(('legos-nea','LEGOS-NEA'))\n listeMaj.append(('tpxo','TPXO'))\n listeMaj.append((' x',' X'))\n listeMaj.append((' y',' Y'))\n listeMaj.append(('waf','WAF'))\n listeMaj.append(('(w/kg)','(W/kg)'))\n listeMaj.append(('(j/kg)','(W/kg)'))\n listeMaj.append(('zokagoa','Zokagoa'))\n listeMaj.append(('nikuradse','Nikuradse'))\n listeMaj.append(('froude','Froude'))\n listeMaj.append(('gauss','Gauss'))\n listeMaj.append(('seidel','Seidel'))\n listeMaj.append(('leo','Leo'))\n listeMaj.append(('postma','Postma'))\n listeMaj.append(('crout','Crout'))\n listeMaj.append(('okada','Okada'))\n listeMaj.append(('jmj','JMJ'))\n listeMaj.append(('haaland','HAALAND'))\n listeMaj.append(('grad(u)','grad(U)'))\n listeMaj.append(('variable z','variable Z'))\n listeMaj.append(('variable r','variable R'))\n listeMaj.append(('ascii','ASCII'))\n\n with open(ts_file, 'r') as f:\n for ligne in f.readlines():\n if pattern_In.match(ligne):\n m = pattern_In.match(ligne)\n ident = m.group('ident')\n if pattern_Out.match(ligne):\n m = pattern_Out.match(ligne)\n traduit = m.group('traduit')\n dicoCataToTelemac[ident] = traduit\n traduitMin = traduit.lower()\n for t in listeMaj :\n traduit = traduitMin.replace(t[0], t[1])\n traduitMin = traduit\n chaine = traduitMin[0].upper() + traduitMin[1:]\n dicoCataToLabel[ident] = chaine\n if pattern_In2.match(ligne):\n m = pattern_In2.match(ligne)\n ident = m.group('ident')\n if pattern_Out2.match(ligne):\n m = 
pattern_Out2.match(ligne)\n traduit = m.group('traduit')\n dicoCataToTelemac[ident] = traduit\n dicoCataToLabel[ident] = traduit\n\n with open(new_ts_file, 'w') as f:\n f.write(header)\n for k in dicoCataToTelemac :\n text = \" <message>\\n <source>\"\n text += k\n text += \"</source>\\n <translation>\"\n text += dicoCataToLabel[k]\n text += \"</translation>\\n </message>\\n\"\n f.write(text)\n f.write(end)",
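A self-contained sketch of the `<source>`/`<translation>` pairing logic above: the source id is remembered when its line is seen, and the next translation line completes the dictionary entry. The sample text is invented.

```python
import re

pat_in = re.compile(r'^\s*<source>(?P<ident>.*)</source>\s*$')
pat_out = re.compile(r'^\s*<translation>(?P<traduit>.*)</translation>\s*$')

sample = """\
<source>VELOCITY DIFFUSIVITY</source>
<translation>diffusion des vitesses</translation>
"""

mapping = {}
ident = None
for line in sample.splitlines():
    m = pat_in.match(line)
    if m:
        ident = m.group('ident')          # remember the key...
    m = pat_out.match(line)
    if m and ident is not None:
        mapping[ident] = m.group('traduit')  # ...until its translation arrives
print(mapping)
```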
"def zoo_import(name, head=''):\n net = gz.get_model(name, pretrained=True)\n export_block(head + name, net, preprocess=True)",
"def gauges2kml(rundata=None, fname='gauges.kml', verbose=True):\n\n\n if rundata is None:\n try:\n import setrun\n reload(setrun)\n rundata = setrun.setrun()\n except:\n raise IOError(\"*** cannot execute setrun file\")\n\n elev = 0.\n kml_text = kml_header(fname)\n\n\n gauges = rundata.gaugedata.gauges\n if len(gauges)==0 and verbose:\n print(\"No gauges found in setrun.py\")\n\n\n for rnum,gauge in enumerate(gauges):\n t1,t2 = gauge[3:5]\n x1,y1 = gauge[1:3]\n gaugeno = gauge[0]\n if verbose:\n print(\"Gauge %i: %s, %s \\n\" % (gaugeno,f2s(x1),f2s(y1)) \\\n + \" t1 = %s, t2 = %s\" % (f2s(t1),f2s(t2)))\n mapping = {}\n mapping['gaugeno'] = gaugeno\n mapping['t1'] = t1\n mapping['t2'] = t2\n mapping['x1'] = x1\n mapping['y1'] = y1\n mapping['elev'] = elev\n mapping['name'] = 'Gauge %i' % rnum\n description = \" t1 = %s, t2 = %s\\n\" % (f2s(t1),f2s(t2)) \\\n + \" x1 = %s, y1 = %s\\n\" % (f2s(x1),f2s(y1))\n mapping['desc'] = description\n\n gauge_text = kml_gauge(mapping)\n kml_text = kml_text + gauge_text\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def write(self,vname,kmz='out.kmz'):\n\n imgs=[] # to store a list of all images created\n content=[] # the content of the main kml\n vstr='files/%s_%05i.png' # format specification for images (all stored in `files/' subdirectory)\n\n # create empty files subdirectory for output images\n try:\n shutil.rmtree('files')\n except:\n pass\n os.makedirs('files')\n\n # loop through all time slices and create the image data\n # appending to the kml content string for each image\n for i in xrange(0,self.nstep,1):\n kml=ncNWRC(self.filename,istep=i)\n img=vstr % (vname,i)\n imgs.append(img)\n content.append(kml.image2kml(vname,img))\n\n # create the main kml file\n kml=ncNWRC.kmlstr % \\\n {'content':'\\n'.join(content),\\\n 'prog':ncNWRC.progname}\n\n # create a zipfile to store all images + kml into a single compressed file\n z=zipfile.ZipFile(kmz,'w',compression=zipfile.ZIP_DEFLATED)\n z.writestr(kmz[:-3]+'kml',kml)\n for img in imgs:\n z.write(img)\n z.close()",
"def prepare_ozi(mbbox, mwidth, mheight, name, transform):\n def deg(value, is_lon):\n degrees = math.floor(abs(value))\n minutes = (abs(value) - degrees) * 60\n return '{:4d},{:3.5F},{}'.format(\n int(round(degrees)), minutes,\n ('W' if is_lon else 'S') if value < 0 else ('E' if is_lon else 'N'))\n\n ozipoint = ('Point{:02d},xy, , ,in, deg, , ,N, , ,E' +\n ', grid, , , ,N')\n bbox = transform.backward(mbbox)\n points = \"\\n\".join([ozipoint.format(n) for n in range(3, 31)])\n header = '''OziExplorer Map Data File Version 2.2\nNik4\n{name}\n1 ,Map Code,\nWGS 84,WGS 84, 0.0000, 0.0000,WGS 84\nReserved 1\nReserved 2\nMagnetic Variation,,,E\nMap Projection,Mercator,PolyCal,No,AutoCalOnly,No,BSBUseWPX,No\nPoint01,xy, 0, 0,in, deg,{top},{left}, grid, , , ,N\nPoint02,xy, {width:4d}, {height:4d},in, deg,{bottom},{right}, grid, , , ,N\n{points}\nProjection Setup,,,,,,,,,,\nMap Feature = MF ; Map Comment = MC These follow if they exist\nTrack File = TF These follow if they exist\nMoving Map Parameters = MM? These follow if they exist\nMM0,Yes\nMMPNUM,4\nMMPXY,1,0,0\n'''.format(name=name,\n top=deg(bbox.maxy, False),\n left=deg(bbox.minx, True),\n width=mwidth - 1,\n height=mheight - 1,\n bottom=deg(bbox.miny, False),\n right=deg(bbox.maxx, True),\n points=points)\n return ''.join([\n header,\n \"MMPXY,2,{},0\\n\".format(mwidth),\n \"MMPXY,3,{},{}\\n\".format(mwidth, mheight),\n \"MMPXY,4,0,{}\\n\".format(mheight),\n 'MMPLL,1,{:4.6f},{:4.6f}\\n'.format(bbox.minx, bbox.maxy),\n 'MMPLL,2,{:4.6f},{:4.6f}\\n'.format(bbox.maxx, bbox.maxy),\n 'MMPLL,3,{:4.6f},{:4.6f}\\n'.format(bbox.maxx, bbox.miny),\n 'MMPLL,4,{:4.6f},{:4.6f}\\n'.format(bbox.minx, bbox.miny),\n \"MM1B,{}\\n\".format((mbbox.maxx - mbbox.minx) / mwidth * math.cos(\n math.radians(bbox.center().y))),\n \"MOP,Map Open Position,0,0\\n\",\n \"IWH,Map Image Width/Height,{},{}\\n\".format(mwidth, mheight),\n ])",
"def writer(output, output_name, output_data):\n\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if('Latitude' in exif.keys() and\n 'Latitude Reference' in exif.keys() and\n 'Longitude Reference' in exif.keys() and\n 'Longitude' in exif.keys()):\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'],\n description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output, output_name))",
"def convertIcdar2013Localization(dataDir, outPrefix, objectives, imgExt='jpg',\n gtPrefix='gt_', gtExt='txt'):\n\n imgFileList = [ff for ff in os.listdir(dataDir)\n if re.search('.'+imgExt+'$', ff)]\n gtFileList = [ff for ff in os.listdir(dataDir)\n if re.search('^'+gtPrefix+'\\w*.'+gtExt+'$', ff)]\n wordList = getIcdar2013WordList(dataDir, gtFileList)\n\n lenList, charMat = wordsToChars(wordList)\n outFilenames = makeLabelFiles(objectives, dataDir, imgFileList, lenList,\n charMat, outPrefix)\n return outFilenames",
"def make_json_airspace_format(self):\n # The previous fct make_open_airspace_format already stored, coordinates_kml, name and type\n # This data is collected in an dictionary, which then is stored as json.\n # initialize dict\n coordinates_as_list_of_floats = []\n # run through coordinates\n coordinates_as_list_of_floats = []\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n lat_long = coo_pt.split(',')\n coordinates_as_list_of_floats.append([float(lat_long[1]), float(lat_long[0])])\n # make json dict\n # rename name if not thermal space\n if self.name.startswith('TS_') and not (self.as_type == 'A' or self.as_type == 'B'):\n name_for_json = self.name[3:]\n else:\n name_for_json = self.name\n # rename airspace type for json:\n if self.as_type == 'A':\n self.as_type = 'Good_thermals'\n if self.as_type == 'B':\n self.as_type = 'Bad_thermals'\n self.json_dict = {\"AL\": \"FL98\", \"AH\": \"FL99\", \"AC\": self.as_type, \"AN\": name_for_json, \"data\": coordinates_as_list_of_floats}",
"def utt_to_scene(file_name):\n with open(file_name, 'r') as file:\n data = file.readlines()\n data = [line.strip().split() for line in data if line.strip() != '']\n data = [[line[0], \" \".join(line[1:])] for line in data]\n preproc_data = [[line[0], list(map(lambda x: x[:x.find(\":\")], line[1].split(',')))[:-1]] for line in data]\n scene_mapping = {line[0]: line[1] for line in preproc_data}\n return scene_mapping",
"def read_szx_fmv_12(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n mphr = eps_file.mphr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = raw_data[\"LONGITUDE\"].size\n\n data = {}\n metadata = {}\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n metadata[\"spacecraft_id\"] = np.int8(mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(mphr[\"ORBIT_START\"])\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n\n for f in fields:\n metadata[f] = np.int16(mphr[f.upper()])\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"abs_line_number\"\n ]\n\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan), (\"latitude\", long_nan),\n (\"swath indicator\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = nan_val\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"num_val_trip\", ulong_nan), (\"f_kp\", byte_nan),\n (\"f_usable\", byte_nan), (\"f_f\", uint_nan), (\"f_v\", uint_nan),\n (\"f_oa\", uint_nan), (\"f_sa\", uint_nan), (\"f_tel\", uint_nan),\n (\"f_ref\", uint_nan), (\"f_land\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n # modify longitudes from (0, 360) to (-180,180)\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1),\n n_lines).astype(np.uint8)\n\n data[\"line_num\"] = idx_nodes.astype(np.uint16)\n\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n data[\"swath_indicator\"] = data.pop(\"swath indicator\")\n\n return data, metadata",
"def get_kml_dict(self, tx, ty_tms, tz, image_format, draworder = 0):\n d = {}\n\n d[\"south\"], d[\"west\"], d[\"north\"], d[\"east\"] = self.tileswne(tx, ty_tms, tz)\n\n image_filename = get_tile_filename(tx, ty_tms, tz, format_extension[image_format],False)\n d[\"image_filename\"] = image_filename\n d[\"image_filename\"] = d[\"image_filename\"].replace(\"\\\\\",\"/\")\n\n if self.options.url is None:\n d[\"image_url\"] = \"../../%s\" % image_filename\n else:\n d[\"image_url\"] = \"%s%s\" % (self.options.url, image_filename)\n d[\"image_url\"] = d[\"image_url\"].replace(\"\\\\\",\"/\")\n\n url = self.options.url\n if url is None:\n # Top level KML is linked from `doc.kml' and it needs different path.\n if tz == self.tminz:\n url = \"\"\n else:\n url = \"../../\"\n\n if self.options.kmz:\n extension = \"kmz\"\n else:\n extension = \"kml\"\n\n d[\"link_url\"] = \"%s%s\" % (url, get_tile_filename(tx, ty_tms, tz, extension,False))\n d[\"link_url\"] = d[\"link_url\"].replace(\"\\\\\",\"/\")\n\n d[\"minlodpixels\"] = int(self.tilesize / 2)\n d[\"maxlodpixels\"] = -1 # int(self.tilesize * 8)\n\n if tx == 0:\n d[\"draw_order\"] = draworder + 2 * tz + 1\n else:\n d[\"draw_order\"] = draworder + 2 * tz\n\n return d",
"def make_e3sm_to_cmip_maps(config, logger, mesh_short_name, creation_date,\n ntasks):\n\n link_dir = '../assembled_files/diagnostics/maps'\n\n try:\n os.makedirs(link_dir)\n except FileExistsError:\n pass\n\n src_scrip_filename = 'ocean.scrip.nc'\n cmip6_grid_res = config.get('files_for_e3sm', 'cmip6_grid_res')\n if cmip6_grid_res == '180x360':\n dst_scrip_filename = 'cmip6_180x360_scrip.20181001.nc'\n elif cmip6_grid_res == '720x1440':\n dst_scrip_filename = 'cmip6_720x1440_scrip.20181001.nc'\n else:\n raise ValueError(f'Unexpected cmip6_grid_res: {cmip6_grid_res}')\n\n parallel_executable = config.get('parallel', 'parallel_executable')\n # split the parallel executable into constituents in case it includes flags\n parallel_command = parallel_executable.split(' ')\n parallel_system = config.get('parallel', 'system')\n if parallel_system == 'slurm':\n parallel_command.extend(['-n', f'{ntasks}'])\n elif parallel_system == 'single_node':\n if ntasks > 1:\n parallel_command.extend(['-n', f'{ntasks}'])\n else:\n raise ValueError(f'Unexpected parallel system: {parallel_system}')\n parallel_command = ' '.join(parallel_command)\n\n map_methods = dict(aave='conserve', mono='fv2fv_flx', nco='nco')\n for suffix, map_method in map_methods.items():\n local_map_filename = f'map_mpas_to_cmip6_{suffix}.nc'\n args = ['ncremap', f'--mpi_pfx={parallel_command}',\n f'--alg_typ={map_method}',\n f'--grd_src={src_scrip_filename}',\n f'--grd_dst={dst_scrip_filename}',\n f'--map={local_map_filename}']\n check_call(args, logger=logger)\n\n map_filename = \\\n f'map_{mesh_short_name}_to_cmip6_{cmip6_grid_res}_{suffix}.{creation_date}.nc' # noqa: E501\n\n symlink(os.path.abspath(local_map_filename),\n f'{link_dir}/{map_filename}')",
"def funcion_escribe_kml():\n\n DB = \"geoinfo\" # default database name\n LOGIN = \"gast\" # default login\n PASSWORD = \"gast\" # default password\n\n cnx = MySQLdb.connect(db=DB, user=LOGIN, passwd=PASSWORD)\n cursor = cnx.cursor()\n\n cursor.execute(\"SELECT * from wlan order by essid\")\n results = cursor.fetchall()\n\n print \"Total APs: %s\" % len(results) # print total AP count\n\n f = open(sys.argv[1], 'w')\n f.write('<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n')\n f.write('<kml xmlns=\"http://earth.google.com/kml/2.2\">\\n')\n f.write(' <Folder>\\n')\n f.write(' <name>GpsDrive+Kismet wifis</name>\\n')\n # By default folder is showed\n f.write(' <visibility>1</visibility>\\n')\n # GpsDrive icon\n f.write(' <ScreenOverlay>\\n')\n f.write(' <name>Info</name>\\n')\n f.write(' <description>Wifi data</description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n f.write(' <Icon>\\n')\n f.write(' <href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/gpsdrivelogo.png</href>\\n')\n f.write(' </Icon>\\n')\n f.write(' <overlayXY x=\"0\" y=\"-1\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <screenXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <rotationXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <size x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' </ScreenOverlay>')\n\n # write all APs to .kml file\n for line in results:\n name = line[6].replace('&', 'and') # To avoid Google Earth errors\n wep = line[8]\n lat = line[1]\n lon = line[2]\n mac = line[5]\n\n f.write('\\n')\n f.write(' <Placemark>\\n')\n f.write(' <name>%s</name>\\n' % name)\n f.write(' <description>')\n f.write(' <![CDATA[ <table width=\"300\"><tr><td>')\n f.write(' - EESID: %s\\n <br />' % name)\n f.write(' - BBSID: %s\\n <br />' % mac)\n tipo_ap = funcion_tipo_ap(wep)\n f.write(' - Security: %s\\n <br />' % tipo_ap)\n f.write(' - GPS coords.: %s, %s\\n <br />' % (lon, lat))\n f.write(' </td></tr></table> ]]>')\n f.write(' </description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n\n tipo_ap = funcion_tipo_ap(wep) # get AP type\n\n # Draw AP icon\n f.write('<Style>')\n f.write('<IconStyle>')\n f.write(' <Icon><href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/%s.png</href></Icon>\\n' % tipo_ap)\n f.write('</IconStyle>')\n f.write('</Style>')\n f.write(' <Point><coordinates>%s,%s,45</coordinates></Point>\\n' % (lon, lat))\n f.write(' </Placemark>\\n')\n\n f.write(' </Folder>\\n')\n f.write('</kml>')",
"def txt2hdf5_mudis(config, init_file=0, final_file=100, step=1, expo='100'):\n # --------SKYMAP--------------\n # Create the directory to save the results\n os.makedirs(os.path.dirname(cwd + '/config_files/'), exist_ok=True)\n\n alignment = add_align()\n\n # Extract skymap from alignment file\n skymap = np.zeros((len(alignment), 2))\n\n for i in np.arange(len(skymap)):\n skymap[i] = alignment['Azimuth'][i], alignment['Zenith'][i]\n\n # Save Skymap information\n with h5py.File(cwd + '/config_files/skymap_radiance.h5', 'w') as sky:\n\n if not list(sky.items()):\n sky.create_dataset('/skymap', data=skymap)\n else:\n del sky['skymap']\n\n sky.create_dataset('/skymap', data=skymap, dtype='f4')\n sky['skymap'].attrs['Columns'] = 'Azimuth, Zenith'\n sky['skymap'].dims[0].label = 'channel'\n sky['skymap'].dims[1].label = 'Azimuth, Zenith'\n\n config['skymap'] = skymap\n\n # Save MUDIS file information\n\n # Import the radiance data from sensor\n files = sorted(\n glob.glob(config['raw_dir'] + '/radiance/{}/data/data_*.txt'.format(config['date'])))\n\n print('Total files in the directory: ' + str(len(files)) + ' files')\n\n ans = input('convert all files? (y/n): ')\n\n if ans == 'n':\n print('configure initial and final file index in the function options')\n else:\n init_file = 0\n final_file = len(files)\n\n for fil in np.arange(init_file, final_file):\n # Import the data from the file\n file = np.genfromtxt(files[fil], delimiter='', skip_header=11)\n\n # ------------RADIANCE DATA RAW---------------\n # create the radiance matrix\n data = np.zeros([113, 992])\n\n for i in np.arange(113):\n if str(alignment.iloc[i][3]) == 'nan':\n data[i] = np.nan\n else:\n try:\n data[i] = file[:, int(alignment.iloc[i][3] + config['channel_pixel_adj'])] #\n except:\n pass #\n # read the pixels index\n # in the alignment file and copy the\n # data in the radiance matrix']))\n\n # Correct time for the file UTC\n name = os.path.split(files[fil])\n\n # Read name of the file (correct time)\n time = name[1][6:25]\n # convert time to datetime format\n time = datetime.datetime.strptime(time, '%d.%m.%Y_%H_%M_%S')\n # print(time)\n new_name = datetime.datetime.strftime(time, '%Y%m%d_%H%M%S')\n\n with open(files[fil], 'r') as file:\n dat = file.readlines()\n\n # Extract information from .dat file\n exposure = int(dat[4][12:-1])\n NumAve = int(dat[7][17:-1])\n CCDTemp = int(dat[8][15:-1])\n NumSingMes = int(dat[10][27:-1])\n ElectrTemp = int(dat[9][23:-1])\n\n # Create the directory to save the results\n os.makedirs(os.path.dirname(config['str_dir'] + '/radiance/{}/data/').format(config['date']),\n exist_ok=True)\n\n if exposure == expo:\n # Create a file in the disk\n with h5py.File(config['str_dir'] + '/radiance/{}/data/{}.h5'.format(config['date'], new_name),\n 'w') as datos:\n\n if not list(datos.items()):\n # Create two datasets(use only one time)\n datos.create_dataset('/data', data=data, dtype='f4')\n datos.create_dataset('/skymap', data=skymap, dtype='f4')\n else:\n del datos['data']\n del datos['skymap']\n print('data deleted and corrected')\n datos.create_dataset('/data', data=data, dtype='f4')\n datos.create_dataset('/skymap', data=skymap, dtype='f4')\n\n # Add attributes to datasets\n datos['data'].attrs['time'] = str(time)\n datos['data'].attrs['Exposure'] = exposure\n datos['data'].attrs['NumAver'] = NumAve\n datos['data'].attrs['CCDTemp'] = CCDTemp\n datos['data'].attrs['NumSingMes'] = NumSingMes\n datos['data'].attrs['ElectrTemp'] = ElectrTemp\n datos['data'].attrs['Latitude'] = '52.39N'\n 
datos['data'].attrs['Longitude'] = '9.7E'\n datos['data'].attrs['Altitude'] = '65 AMSL'\n datos['data'].dims[0].label = 'channel'\n datos['data'].dims[1].label = 'wavelength'\n\n datos['skymap'].attrs['Columns'] = 'Azimuth, Zenith'\n\n datos.close()\n\n print('File ' + str(fil + init_file + 1) + ' of ' +\n str((final_file - init_file)) + ' saved')\n else:\n print('Exposure are not same', expo, exposure)\n break\n\n print('Completed')",
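A minimal sketch of the h5py write pattern used above (create or replace a dataset, then attach attributes and dimension labels); the filename and values are placeholders.

```python
import h5py
import numpy as np

data = np.random.rand(113, 992).astype('f4')

with h5py.File('radiance_example.h5', 'w') as f:
    if 'data' in f:
        del f['data']              # replace on rewrite, mirroring the code above
    dset = f.create_dataset('data', data=data, dtype='f4')
    dset.attrs['Exposure'] = 100   # descriptive metadata travels with the data
    dset.attrs['Latitude'] = '52.39N'
    dset.dims[0].label = 'channel'
    dset.dims[1].label = 'wavelength'
```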
"def from_enmap(emap):\n\n new_map = so_map()\n hdulist = emap.wcs.to_fits()\n header = hdulist[0].header\n new_map.pixel = header[\"CTYPE1\"][-3:]\n try:\n new_map.ncomp = header[\"NAXIS3\"]\n except:\n new_map.ncomp = 1\n new_map.data = emap.copy()\n new_map.nside = None\n new_map.geometry = new_map.data.geometry[1:]\n new_map.coordinate = header[\"RADESYS\"]\n if new_map.coordinate == \"ICRS\":\n new_map.coordinate = \"equ\"\n\n return new_map",
"def read_szx_fmv_13(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n mphr = eps_file.mphr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = raw_data[\"LONGITUDE\"].size\n\n data = {}\n metadata = {}\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n metadata[\"spacecraft_id\"] = np.int8(mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(mphr[\"ORBIT_START\"])\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n\n for f in fields:\n metadata[f] = np.int16(mphr[f.upper()])\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"abs_line_number\"\n ]\n\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan), (\"latitude\", long_nan),\n (\"swath indicator\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = nan_val\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"num_val_trip\", ulong_nan), (\"f_kp\", byte_nan),\n (\"f_usable\", byte_nan), (\"land_frac\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n # modify longitudes from (0, 360) to (-180,180)\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1),\n n_lines).astype(np.uint8)\n\n data[\"line_num\"] = idx_nodes.astype(np.uint16)\n\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n data[\"swath_indicator\"] = data.pop(\"swath indicator\")\n\n data[\"f_land\"] = data.pop(\"land_frac\")\n\n return data, metadata",
"def load_ktrans(path):\n ext = \"-Ktrans.mhd\"\n img = sitk.ReadImage(path + ext)\n\n # Order z,y,x\n arr = sitk.GetArrayFromImage(img)\n origin = np.array(list(reversed(img.GetOrigin())))\n scale = np.array(list(reversed(img.GetSpacing())))\n arr = rescale_arr(arr, scale)\n\n return arr",
"def __load_topography__(filepath):\n\tfrom clawpack.geoclaw import topotools\n\ttopo = topotools.Topography(filepath)\n\t\n\tif TESTING:\n\t\timport matplotlib.pyplot as plt\n\t\ttopo.plot()\n\t\tplt.show()\n\ttopo.topo_type = 3\n\txgrid = topo.X\n\tygrid = topo.Y\n\tzgrid = topo.Z\n\t\n\t#temp; find a better solution (e.g. convert from lat/lon to actual space)\n\t#xgrid = 1.e4 * xgrid\n\t#ygrid = 1.e4 * ygrid\n\t\n\t#test only\n\tshape = zgrid.shape\n\tny, nx = shape[0], shape[1]\n\t#for iy in range(0,ny):\n\t\t#zgrid[iy, 0] = zgrid[iy,0]+1e4\n\t#for ix in range(0,nx):\n\t\t#zgrid[1, ix] = zgrid[1,ix]-1e4\n\t\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\twavyz = wavy(xgrid, ygrid)\n\t\n\t\n\tfor ix in range(0,0):\n\t\tfor iy in range(0,0):\n\t\t\tzgrid[iy, ix] = 1e4*wavyz[iy, ix]\n\t\n\tzgrid = 1e-4 * zgrid\n\t\n\treturn (xgrid, ygrid, zgrid)",
"def k2g(\n kml_path_or_buffer,\n output_dir,\n feature_collection_name,\n style_type,\n style_filename,\n separate_folders,\n):\n style, *layers = m.convert(\n kml_path_or_buffer,\n style_type=style_type,\n separate_folders=separate_folders,\n feature_collection_name=feature_collection_name,\n )\n\n # Create output directory if it doesn't exist\n output_dir = pl.Path(output_dir)\n if not output_dir.exists():\n output_dir.mkdir(parents=True)\n output_dir = output_dir.resolve()\n\n # Write style file\n path = output_dir / style_filename\n with path.open(\"w\") as tgt:\n json.dump(style, tgt)\n\n # Create filenames for layers\n stems = m.disambiguate(m.to_filename(layer[\"name\"]) for layer in layers)\n filenames = [f\"{stem}.geojson\" for stem in stems]\n\n # Write layer files\n for i in range(len(layers)):\n path = output_dir / filenames[i]\n with path.open(\"w\") as tgt:\n json.dump(layers[i], tgt)",
"def saveKML(kmlFile):\n\n tilePath = os.path.basename('map-NYC_heatmap.png')\n north = topLeftLat\n south = bottomRightLat\n east = topLeftLon\n west = bottomRightLon\n \n bytes = KML % (tilePath, north, south, east, west)\n file(kmlFile, \"w\").write(bytes)",
"def preprocess_osm(\n src_file, dst_dir, dst_crs, dst_shape, dst_transform, geom=None, overwrite=False\n):\n log.info(\"Starting preprocessing of OSM data.\")\n for theme in (\"roads\", \"health\", \"water\", \"ferry\"):\n dst_file = os.path.join(dst_dir, f\"{theme}.gpkg\")\n if os.path.isfile(dst_file) and not overwrite:\n log.info(f\"{os.path.basename(dst_file)} already exists. Skipping.\")\n continue\n try:\n thematic_extract(src_file, theme, dst_file)\n except MissingDataError:\n log.warning(\n f\"Skipping extraction of `{theme}` objects due to missing data.\"\n )\n osm_water = os.path.join(dst_dir, \"water.gpkg\")\n dst_file = os.path.join(dst_dir, \"water_osm.tif\")\n create_water_raster(\n osm_water,\n dst_file,\n dst_crs,\n dst_shape,\n dst_transform,\n include_streams=False,\n geom=geom,\n overwrite=overwrite,\n )\n if geom:\n mask_raster(dst_file, geom)",
"def read(filepath):\n # Core Library modules\n import xml.etree.ElementTree\n\n root = xml.etree.ElementTree.parse(filepath).getroot()\n\n # Get the raw data\n recording = []\n strokes = sorted(\n root.findall(\"{http://www.w3.org/2003/InkML}trace\"),\n key=lambda child: int(child.attrib[\"id\"]),\n )\n time = 0\n for stroke in strokes:\n stroke = stroke.text.strip().split(\",\")\n stroke = [point.strip().split(\" \") for point in stroke]\n if len(stroke[0]) == 3:\n stroke = [\n {\"x\": float(x), \"y\": float(y), \"time\": float(t)} for x, y, t in stroke\n ]\n else:\n stroke = [{\"x\": float(x), \"y\": float(y)} for x, y in stroke]\n new_stroke = []\n for p in stroke:\n new_stroke.append({\"x\": p[\"x\"], \"y\": p[\"y\"], \"time\": time})\n time += 20\n stroke = new_stroke\n time += 200\n recording.append(stroke)\n\n # Get LaTeX\n formula_in_latex = None\n annotations = root.findall(\"{http://www.w3.org/2003/InkML}annotation\")\n for annotation in annotations:\n if annotation.attrib[\"type\"] == \"truth\":\n formula_in_latex = annotation.text\n hw = handwritten_data.HandwrittenData(\n json.dumps(recording), formula_in_latex=formula_in_latex\n )\n for annotation in annotations:\n if annotation.attrib[\"type\"] == \"writer\":\n hw.writer = annotation.text\n elif annotation.attrib[\"type\"] == \"category\":\n hw.category = annotation.text\n elif annotation.attrib[\"type\"] == \"expression\":\n hw.expression = annotation.text\n\n # Get segmentation\n segmentation = []\n trace_groups = root.findall(\"{http://www.w3.org/2003/InkML}traceGroup\")\n if len(trace_groups) != 1:\n raise Exception(\n \"Malformed InkML\",\n (\n \"Exactly 1 top level traceGroup expected, found %i. \"\n \"(%s) - probably no ground truth?\"\n )\n % (len(trace_groups), filepath),\n )\n trace_group = trace_groups[0]\n symbol_stream = [] # has to be consistent with segmentation\n for tg in trace_group.findall(\"{http://www.w3.org/2003/InkML}traceGroup\"):\n annotations = tg.findall(\"{http://www.w3.org/2003/InkML}annotation\")\n if len(annotations) != 1:\n raise ValueError(\n \"%i annotations found for '%s'.\" % (len(annotations), filepath)\n )\n db_id = formula_to_dbid(normalize_symbol_name(annotations[0].text))\n symbol_stream.append(db_id)\n trace_views = tg.findall(\"{http://www.w3.org/2003/InkML}traceView\")\n symbol = []\n for traceView in trace_views:\n symbol.append(int(traceView.attrib[\"traceDataRef\"]))\n segmentation.append(symbol)\n hw.symbol_stream = symbol_stream\n hw.segmentation = segmentation\n _flat_seg = [stroke2 for symbol2 in segmentation for stroke2 in symbol2]\n assert len(_flat_seg) == len(\n recording\n ), \"Segmentation had length %i, but recording has %i strokes (%s)\" % (\n len(_flat_seg),\n len(recording),\n filepath,\n )\n assert set(_flat_seg) == set(range(len(_flat_seg))), (\n f\"set(_flat_seg) = {set(_flat_seg)} !=\"\n f\"{set(range(len(_flat_seg)))} = set(range(len(_flat_seg)))\"\n )\n hw.inkml = beautify_xml(filepath)\n hw.filepath = filepath\n return hw",
"def read_szf_fmv_12(eps_file):\n data = {}\n metadata = {}\n\n n_lines = eps_file.mdr_counter\n n_node_per_line = eps_file.mdr[\"LONGITUDE_FULL\"].shape[1]\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n # extract metadata\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n metadata[\"state_vector_time\"] = datetime.strptime(\n eps_file.mphr[\"STATE_VECTOR_TIME\"][:-4], \"%Y%m%d%H%M%S\")\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n for f in fields:\n metadata[f] = np.int16(eps_file.mphr[f.upper()])\n\n # extract time\n dt = np.datetime64(\n \"2000-01-01\") + eps_file.mdr[\"UTC_LOCALISATION\"][\"day\"].astype(\n \"timedelta64[D]\"\n ) + eps_file.mdr[\"UTC_LOCALISATION\"][\"time\"].astype(\"timedelta64[ms]\")\n data[\"time\"] = dt[idx_nodes]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\", \"flagfield_rf1\", \"flagfield_rf2\", \"flagfield_pl\",\n \"flagfield_gen1\"\n ]\n\n # 101 min = 6082 seconds\n # state_vector_time = ascending node crossing time - 1520.5,\n # time crossing at -90 lat\n orbit_start_time = metadata[\"state_vector_time\"] - timedelta(\n seconds=1520.5)\n orbit_end_time = orbit_start_time + timedelta(seconds=6082)\n\n data[\"orbit_nr\"] = np.ma.zeros(\n data[\"time\"].size, dtype=np.int32,\n fill_value=int32_nan) + metadata[\"orbit_start\"]\n data[\"orbit_nr\"][data[\"time\"] > orbit_end_time] += 1\n\n metadata[\"orbits\"] = {}\n for orbit_nr in np.unique(data[\"orbit_nr\"]):\n if orbit_nr == metadata[\"orbit_start\"]:\n metadata[\"orbits\"][orbit_nr] = (orbit_start_time, orbit_end_time)\n else:\n metadata[\"orbits\"][orbit_nr] = (orbit_end_time, orbit_end_time +\n timedelta(seconds=6082))\n\n # extract data\n for f in fields:\n if eps_file.mdr_sfactor[f.upper()] == 1:\n data[f] = eps_file.mdr[f.upper()].flatten()[idx_nodes]\n else:\n data[f] = (eps_file.mdr[f.upper()].flatten() * 1. /\n eps_file.mdr_sfactor[f.upper()])[idx_nodes]\n\n data[\"swath_indicator\"] = (data[\"beam_number\"].flatten() > 3).astype(\n np.uint8)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n fields = [(\"longitude_full\", long_nan), (\"latitude_full\", long_nan),\n (\"sigma0_full\", long_nan), (\"inc_angle_full\", uint_nan),\n (\"azi_angle_full\", int_nan), (\"land_frac\", uint_nan),\n (\"flagfield_gen2\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = eps_file.mdr[f.upper()].flatten()\n invalid = eps_file.mdr[f.upper()].flatten() == nan_val\n\n if eps_file.mdr_sfactor[f.upper()] != 1:\n data[f] = data[f] * 1. / eps_file.mdr_sfactor[f.upper()]\n\n data[f][invalid] = nan_val\n\n # modify longitudes from (0, 360) to (-180, 180)\n mask = np.logical_and(data[\"longitude_full\"] != long_nan,\n data[\"longitude_full\"] > 180)\n data[\"longitude_full\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n idx = (data[\"azi_angle_full\"] != int_nan) & (data[\"azi_angle_full\"] < 0)\n data[\"azi_angle_full\"][idx] += 360\n\n # set flags\n data[\"f_usable\"] = set_flags(data)\n\n return data, metadata",
"def makeModel(self):\n\n # Get the script\n modelScript = os.path.join(self.datapath, 'make3FGLxml.py')\n if not os.path.isfile(modelScript):\n # download it\n print(\"\\t=== Downloading make3FGLxml.py ===\")\n os.system('wget https://fermi.gsfc.nasa.gov/ssc/data/analysis/user/make3FGLxml.py -O {}'.format(modelScript))\n\n # Create the model using Tyrel's script\n galModel = os.path.join(self.diffpath, 'gll_iem_v06.fits')\n isoModel = os.path.join(self.diffpath, 'iso_'+self.irf+'_v06.txt')\n if (not os.path.isfile(galModel)) or (not os.path.isfile(isoModel)):\n print(\"\\t=== Unable to find the diffuse models, check the variable '$FERMI_DIR' ===\")\n return\n if not os.path.isdir(self.extpath):\n print(\"\\t=== Unable to find models of extended sources, check the variable '$LATEXTDIR' ===\")\n return\n if not os.path.isfile(self.fermicat):\n # download it\n print(\"\\t=== Downloading 3FGL catalog ===\")\n os.system('wget https://fermi.gsfc.nasa.gov/ssc/data/access/lat/4yr_catalog/gll_psc_v16.fit -O {}'.format(self.fermicat))\n\n os.popen(\"python {} {} {} -o {} -G {} -g 'gll_iem_v06'\\\n -I {} -i 'iso_source_v06' -e {} -r 5 -R 10 -ER 10\\\n -s 9 -m False -GIF False\".format(modelScript, self.fermicat,\n self.ft1, self.model, galModel, isoModel, self.extpath))\n\n # Add the target to the model\n tmpName = self.model + '.tmp'\n rfil = open(self.model, 'r')\n wfil = open(tmpName, 'w')\n # Copy the XML to the temporary model\n wfil.writelines([l for l in rfil.readlines() if not l=='</source_library>']) # copy everything but the last line\n wfil.write(' <source ROI_Center_Distance=\"0.00\" name=\"TARGET\" type=\"PointSource\">\\n')\n wfil.write(' <spectrum type=\"PowerLaw2\">\\n')\n wfil.write(' <parameter free=\"1\" max=\"1000\" min=\"1e-05\" name=\"Integral\" scale=\"1e-08\" value=\"0.3591824258\"/>\\n')\n wfil.write(' <parameter free=\"1\" max=\"1\" min=\"-5\" name=\"Index\" scale=\"1\" value=\"-2.7\"/>\\n')\n wfil.write(' <parameter free=\"0\" max=\"1000000\" min=\"20\" name=\"LowerLimit\" scale=\"1\" value=\"100\"/>\\n')\n wfil.write('<parameter free=\"0\" max=\"1000000\" min=\"20\" name=\"UpperLimit\" scale=\"1\" value=\"100000\"/>\\n')\n wfil.write(' </spectrum>\\n')\n wfil.write(' <spatialModel type=\"SkyDirFunction\">\\n')\n wfil.write(' <parameter free=\"0\" max=\"360.0\" min=\"-360.0\" name=\"RA\" scale=\"1.0\" value=\"'+str(self.ra)+'\"/>\\n')\n wfil.write(' <parameter free=\"0\" max=\"360.0\" min=\"-360.0\" name=\"DEC\" scale=\"1.0\" value=\"'+str(self.dec)+'\"/>\\n')\n wfil.write(' </spatialModel>\\n')\n wfil.write(' </source>\\n')\n wfil.write('</source_library>\\n')\n rfil.close()\n wfil.close()\n\n os.remove(self.model)\n os.rename(tmpName, self.model)\n \n print(\"\\t=== Source model {} added ===\".format(self.model))\n return",
"def dictaglat(kind, fname):\n km = kind + 'm'\n ks = kind + 's'\n \n d = {}\n f = open(fname)\n f.next()\n f.next()\n y = 0\n\n for l in f:\n #print(l)\n adict = agline(l)\n #print(adict['charget'])\n\n if y == adict['well']:\n continue\n else:\n gen = adict['gen']\n if gen not in d:\n d[gen] = [] \n\n if adict[km] != 'x' and adict[km] != '-' and adict[km] != '':\n d[gen].append(cmn.convtosec(float(adict[km]), float(adict[ks]))\n - float(cmn.convtosec(float(adict['offset']), 0)))\n \n if kind == 'flare':\n if adict[km] == '':\n d[gen].append(cmn.convtosec(float(adict['chargem']), \n float(adict['charges'])) -\n float(cmn.convtosec(float(adict['offset']), 0)))\n \n y = adict['well']\n return(d)",
"def process(kml_file, kmz=False):\n\ttry:\n\t\tif kmz:\n\t\t\tzipped = zipfile.ZipFile(kml_file)\n\t\t\tkml = Kml(zipped.open('doc.kml'))\n\t\telse: \n\t\t\tkml = Kml(open(kml_file))\n\texcept Exception as e:\n\t\tprint('Failed for %s: %s' % (kml_file, e))\n\telse:\n\t\tprint('FILE NAME: %s' % kml_file)\n\t\tif not is_duplicate(kml.as_dict(), collection): \n\t\t\t# try to update database AND\n\t\t\t# extract files to right place; if one\n\t\t\t# fails, undo the other:\t\n\t\t\ttry:\n\t\t\t\tcollection.insert_one(kml.as_dict())\n\t\t\texcept Exception as e:\n\t\t\t\tprint('Failed to update database with %s: %s' % (kml, e))\n\t\t\telse:\n\t\t\t\ttry:\n\t\t\t\t\tdest = 'static/kml/%s' % kml.uid\n\t\t\t\t\tif kmz:\n\t\t\t\t\t\tzipped.extractall(dest)\n\t\t\t\t\telse:\n\t\t\t\t\t\tif not os.path.exists(os.path.dirname(dest)): os.makedirs(os.path.dirname(dest))\n\t\t\t\t\t\tshutil.copy(kml_file, '%s/doc.kml' % dest)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint('Failed to extract files: %s\\n\\tTrying to remove record from database...' % e)\n\t\t\t\t\ttry:\n\t\t\t\t\t\tcollection.remove(kml.as_json())\n\t\t\t\t\texcept Exception as e:\n\t\t\t\t\t\tprint('Failed to remove item from database -- db is no longer consistent w/ file system: %s' % e)\n\tfinally:\n\t\tif kmz:\n\t\t\tzipped.close()\n\t\telse:\n\t\t\tkml.close()",
"def ks2_to_alf(ks_path, out_path, sr=30000, nchannels=385, label=None, force=True):\n m = model.TemplateModel(dir_path=ks_path,\n dat_path=[],\n sample_rate=sr,\n n_channels_dat=nchannels)\n ac = alf.EphysAlfCreator(m)\n ac.convert(out_path, label=label, force=force)",
"def GEEmacaGCMs(ptsFile,metric,timeStep,startYear,endYear,scenarios,buf,poly,models,\n username,folderOut, scalePix = 4000):\n \n # load required libraries\n import ee\n \n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n time_d = {}\n time_d['month'] = 'projm'\n time_d['year'] = 'projy'\n \n for met in metric:\n\n for scenario in scenarios:\n\n for model in models:\n\n MACA = (ee.ImageCollection('IDAHO_EPSCOR/MACAv2_METDATA_MONTHLY')\n .select(met)\n .filterMetadata('model', 'equals', model)\n .filterMetadata('scenario', 'equals', scenario))\n\n metL = [met]\n \n years = list(range(startYear, endYear + 1))\n yearsEE = ee.List(years)\n \n if all([(timeStep == 'year'),any([(met == 'tasmin'),(met == 'tasmax'),\n (met == 'huss'),(met == 'rsds'),\n (met == 'was')])]):\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n elif (timeStep == 'month'):\n \n img_col = MACA.filter(ee.Filter.calendarRange(startYear, endYear, 'year'))\n\n elif all([(timeStep == 'year'),(met == 'pr')]):\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .sum()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n #else:\n #print(\"incorrect time step specified\")\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_MACA_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('buffered pts by:' + str(buf) + ' for MACA: ' + met + ' ' + scenario + ' ' + model)\n\n elif poly > 0:\n \n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_MACA_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('spatial mean in poly: no buffer for MACA: ' + met + ' ' + scenario + ' ' + 
model)\n\n else:\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_MACA_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print('value at point: no buffer for MACA: ' + met + ' ' + scenario + ' ' + model)",
"def readKML(filename):\n\n kml_file = path.join(filename)\n\n #### se leen los elementos del KML\n with open(kml_file) as f:\n folder = parser.parse(f).getroot().Document.Folder\n\n #### se separan los elementos, nombres de los puntos y las coordenadas\n plnm=[]\n cordi=[]\n for pm in folder.Placemark:\n plnm1 = pm.name\n plcs1 = pm.Point.coordinates\n plnm.append(plnm1.text)\n cordi.append(plcs1.text)\n # print(cordi)\n # print(plnm) \n\n #### se genera el objeto pandas\n db=pd.DataFrame()\n db['point_name']=plnm\n db['cordinates']=cordi\n\n db['Longitude'], db['Latitude'], db['value'] = zip(*db['cordinates'].apply(lambda x: x.split(',', 2)))\n db[\"Longitude\"] = pd.to_numeric(db[\"Longitude\"])\n db[\"Latitude\"] = pd.to_numeric(db[\"Latitude\"])\n del db['cordinates']\n del db['value']\n\n db['Coordinates'] = list(zip(db.Longitude, db.Latitude))\n db['Coordinates'] = db['Coordinates'].apply(Point)\n\n # print(db)\n\n return db",
"def kmlWriter(output_data, output_dir, output_name):\n msg = 'Writing ' + output_name + ' KML output.'\n print '[+]', msg\n logging.info(msg)\n # Instantiate a Kml object and pass along the output filename\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if 'Latitude' in exif.keys() and 'Latitude Reference' in exif.keys() and 'Longitude Reference' in exif.keys() and 'Longitude' in exif.keys():\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'], description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output_dir, output_name))",
"def AlignFieldmaps(self):\n for entry in self.entry_map['fmap']:\n info = self.info[entry]\n\n# Register the magnitude image at the shortest TR to the T1-IR\n# structural image.\n target = self.info[self.norm_src]['imgfile'] + \\\n self.info[self.norm_src]['suffix']\n source = info['magfile'] + info['suffix']\n matfile = info['matfile']\n fmt = '3dAllineate -prefix NULL -1Dmatrix_save %s -base %s ' + \\\n '-source %s -cost mi -warp shift_rotate'\n cmd = fmt % (info['matfile'], target, source)\n self.CheckExec(cmd, [info['matfile']])\n\n# Convert to unitary matrix (remove scaling component.)\n cmd = 'cat_matvec -ONELINE %s -P > %s' % \\\n (info['matfile'], info['matfile_unitary'])\n self.CheckExec(cmd, [info['matfile_unitary']])\n\n# Rotate the magnitude image to the new grid.\n fmt = '3dAllineate -prefix %s -interp cubic -1Dmatrix_apply %s %s'\n cmd = fmt % (info['magfile_r']+info['suffix'], \\\n info['matfile_unitary'], info['magfile'] + info['suffix'])\n self.CheckExec(cmd, [info['magfile_r']+info['suffix']])\n\n# Rotate the fieldmap to the new grid.\n fmt = '3dAllineate -prefix %s -interp cubic -1Dmatrix_apply %s %s'\n cmd = fmt % (info['imgfile_r']+info['suffix'], \\\n info['matfile_unitary'], info['imgfile'] + info['suffix'])\n self.CheckExec(cmd, [info['imgfile_r']+info['suffix']])",
"def read_flat_map(filename,i_map=0) :\n hdul=fits.open(filename)\n w=WCS(hdul[0].header)\n\n maps=hdul[i_map].data\n ny,nx=maps.shape\n\n return w,maps",
"def __make_geo(self):\n # gmsh freecad_part.iges -o out_iges.geo -0\n fname_list = self.__fname.split('.')\n geo_file = fname_list[0]+'.geo'\n runstr = \"%s %s -o %s -0\" % (environment.GMSH, self.__fname, geo_file)\n print(runstr)\n subprocess.call(runstr, shell=True)\n print('Wrote file: %s' % geo_file)",
"def map2mw_Aug(d,k1,entry):\n L = entry.metad['L']\n if L in ['7201','7202']: # 7203 relates to 'hay'\n return 'hA'\n if k1 in map2mw_special_Aug:\n return map2mw_special_Aug[k1]\n regexes = [\n u'<ab>aug.</ab> de {%(.*?)%}',\n u'<ab>aug.</ab> {%(.*?)%}',\n u'<ab>aug.</ab> du <ab>c.</ab> de {%(.*?)%}',\n\n ]\n line = entry.datalines[0] # first line of entry in bur.txt\n for regex in regexes:\n m = re.search(regex,line)\n if m:\n root = m.group(1) # root in \n root_slp1=roman_slp1_mw(root,'verb',d)\n if root_slp1 != None:\n return root_slp1\n\n return '?'",
"def json_to_lkg(filename):\n\n nx_graph = json_to_nx(filename)\n lkg = nx_to_lkg(nx_graph)\n return(lkg)",
"def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):\n if not children:\n children = []\n\n args['tx'], args['ty'], args['tz'] = tx, ty, tz\n args['tileformat'] = tileext\n if 'tilesize' not in args:\n args['tilesize'] = tilesize\n\n if 'minlodpixels' not in args:\n args['minlodpixels'] = int(args['tilesize'] / 2)\n if 'maxlodpixels' not in args:\n args['maxlodpixels'] = int(args['tilesize'] * 8)\n if children == []:\n args['maxlodpixels'] = -1\n\n if tx is None:\n tilekml = False\n args['title'] = options.title\n else:\n tilekml = True\n args['title'] = \"%d/%d/%d.kml\" % (tz, tx, ty)\n args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)\n\n if tx == 0:\n args['drawOrder'] = 2 * tz + 1\n elif tx is not None:\n args['drawOrder'] = 2 * tz\n else:\n args['drawOrder'] = 0\n\n url = options.url\n if not url:\n if tilekml:\n url = \"../../\"\n else:\n url = \"\"\n\n s = \"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://www.opengis.net/kml/2.2\">\n <Document>\n <name>%(title)s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\"\"\" % args\n if tilekml:\n s += \"\"\"\n <Region>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n </Region>\n <GroundOverlay>\n <drawOrder>%(drawOrder)d</drawOrder>\n <Icon>\n <href>%(ty)d.%(tileformat)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\n\"\"\" % args\n\n for cx, cy, cz in children:\n csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)\n s += \"\"\"\n <NetworkLink>\n <name>%d/%d/%d.%s</name>\n <Region>\n <LatLonAltBox>\n <north>%.14f</north>\n <south>%.14f</south>\n <east>%.14f</east>\n <west>%.14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n </Region>\n <Link>\n <href>%s%d/%d/%d.kml</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n <viewFormat/>\n </Link>\n </NetworkLink>\n \"\"\" % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,\n args['minlodpixels'], url, cz, cx, cy)\n\n s += \"\"\" </Document>\n</kml>\n \"\"\"\n return s",
"def Infomap(pajek_string, *args, **kwargs):\n \n def _default_to_regular(d):\n \"\"\"Recursively convert nested defaultdicts to nested dicts.\n \"\"\"\n if isinstance(d, defaultdict):\n d = {k: _default_to_regular(v) for k, v in d.items()}\n return d\n \n def _get_id_to_label(filename):\n def __int_if_int(val):\n try: return int(val)\n except ValueError: return val\n with open('/tmp/input_infomap/' + filename + \".net\", 'r') as fp:\n parsed_network = fp.read()\n return dict(\n (int(n.split()[0]), __int_if_int(n.split('\"')[1]))\n for n in re.split(r\"\\*.+\", parsed_network)[1].split(\"\\n\")[1:-1]\n )\n \n def multilayer(id_to_label, filename):\n with open('/tmp/output_infomap/'+filename+\"_expanded.clu\", 'r') as infile:\n clusters = infile.read()\n\n # Get layers, nodes and clusters from _extended.clu file\n la_no_clu_flow = re.findall(r'\\d+ \\d+ \\d+ \\d.*\\d*', clusters) # [\"30 1 2 0.00800543\",...]\n la_no_clu_flow = [tuple(i.split()) for i in la_no_clu_flow]\n\n layer_node_flow_json = defaultdict(float) # {layer_node: flow, ...}\n node_flow_json = defaultdict(float) # {node: flow, ...}\n community_flow_json = defaultdict(float) # {community: flow, ...}\n communities_json = defaultdict(set) # {layer: {(node, cluster), ...}, ...}\n for layer, node, cluster, flow in la_no_clu_flow:\n layer_node_flow_json[\"%s_%s\" % (layer, id_to_label[int(node)])] += float(flow)\n node_flow_json[\"%s\" % (id_to_label[int(node)])] += float(flow)\n community_flow_json[cluster] += float(flow)\n communities_json[int(layer)].add((id_to_label[int(node)], int(cluster)))\n\n return communities_json, layer_node_flow_json, node_flow_json, community_flow_json\n \n def _parse_communities_planar(id_to_label, filename):\n with open('/tmp/output_infomap/'+filename+\".clu\", 'r') as infile:\n clusters = infile.read()\n \n # Get nodes and clusters from .clu file\n no_clu = [tuple(i.split()[:-1]) for i in re.findall(r\"\\d+ \\d+ \\d.*\\d*\", clusters)] # [(node, cluster), ...]\n return {0: set([(id_to_label[int(no)], int(clu)) for no, clu in no_clu])}\n \n def _clean_up(filename):\n subprocess.call(['rm', '/tmp/input_infomap/' + filename + '.net'])\n subprocess.call(['rm', '/tmp/output_infomap/' + filename + '_expanded.clu'])\n subprocess.call(['rm', '/tmp/output_infomap/' + filename + '.clu'])\n \n # Check for process id in args (for multiprocessing)\n if args[-1][:3] == \"pid\":\n pid = args[-1][3:]\n args = args[:-1]\n else:\n pid = \"\"\n\n # Try to make input_infomap and output_infomap folders in /tmp\n subprocess.call(['mkdir', '/tmp/input_infomap', '/tmp/output_infomap'])\n \n \n # Get network in multilayer string format and define filename\n filename = 'tmpnet' + pid\n\n # Store locally\n with open(\"/tmp/input_infomap/\"+filename+\".net\", 'w') as outfile:\n outfile.write(pajek_string)\n \n # Run Infomap for multilayer network\n subprocess.call(\n ['Infomap', '/tmp/input_infomap/'+filename+\".net\", '/tmp/output_infomap'] + \\\n list(args)\n )\n \n # Parse communities from Infomap output_infomap\n id_to_label = _get_id_to_label(filename)\n \n if 'multilayer' in list(args):\n parsed_communities, layer_node_flow, node_flow, community_flow = multilayer(id_to_label, filename)\n if 'pajek' in list(args):\n parsed_communities = _parse_communities_planar(id_to_label, filename)\n \n _clean_up(filename)\n\n # Produce layer communities\n layer_communities = {}\n for layer, group in list(parsed_communities.items()):\n communities = {}\n for no, clu in group: \n try:\n communities[clu-1].append(no)\n except 
KeyError:\n communities[clu-1] = [no]\n layer_communities[layer] = communities\n \n # Produce community_members\n community_members = defaultdict(Counter)\n for _, communities in list(layer_communities.items()):\n for c, members in list(communities.items()):\n community_members[c].update(members)\n\n return [\n _default_to_regular(community_members),\n layer_communities,\n _default_to_regular(layer_node_flow),\n _default_to_regular(node_flow),\n _default_to_regular(community_flow)\n ]",
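The `_default_to_regular` helper above, demonstrated standalone: nested defaultdicts are converted back to plain dicts so they serialize and print cleanly.

```python
from collections import defaultdict

def default_to_regular(d):
    # Recursively convert nested defaultdicts to nested dicts.
    if isinstance(d, defaultdict):
        d = {k: default_to_regular(v) for k, v in d.items()}
    return d

tree = defaultdict(lambda: defaultdict(int))
tree['a']['x'] += 1
print(default_to_regular(tree))  # {'a': {'x': 1}}
```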
"def writeInput_for_LAMMPS(rd, listAtoms, filename):\n #f=open(\"geo.kirigami_d0.0_\"+str(rd),\"w+\")\n f=open(filename+str(rd),\"w+\")\n f.write(\"\\n\")\n f.write(\"%d atoms\\n\" %len(listAtoms))\n f.write(\"1 atom types\\n\")\n f.write(\"\\n\")\n f.write(\"%f\\t%f xlo xhi\\n\" %(xlo-1, xhi+1))\n f.write(\"%f\\t%f ylo yhi\\n\" %(ylo-1, yhi+1))\n f.write(\"%f\\t%f zlo zhi\\n\" %(zlo-1, zhi+1))\n f.write(\"\\n\")\n f.write(\"Atoms\\n\")\n f.write(\"\\n\")\n for i in range (len(listAtoms)):\n f.write(\"%d\\t1\\t%f\\t%f\\t%f\\n\" %(i+1, listAtoms[i][0], listAtoms[i][1], listAtoms[i][2]))\n f.close()",
"def parse_geometry(path: str) -> Optional[Dict[str, tuple]]:\n if not os.path.isfile(path):\n raise InputError(f'Could not find file {path}')\n if path.endswith('.yml'):\n content = read_yaml_file(path)\n if isinstance(content, dict):\n if 'xyz' in content.keys():\n return content['xyz'] if isinstance(content['xyz'], dict) else str_to_xyz(content['xyz'])\n elif 'opt_xyz' in content.keys():\n return content['opt_xyz'] if isinstance(content['opt_xyz'], dict) else str_to_xyz(content['opt_xyz'])\n software = identify_ess(path)\n xyz_str = ''\n if software == 'xtb':\n lines = _get_lines_from_file(path)\n final_structure, coord, first_line = False, False, True\n for line in lines:\n if '$' in line or 'END' in line or len(line.split()) < 10:\n coord = False\n if coord:\n splits = line.split()\n xyz_str += f'{qcel.periodictable.to_E(splits[3])} {splits[0]} {splits[1]} {splits[2]}\\n'\n if final_structure and ('$coord' in line or len(line.split()) > 15):\n coord = True\n if len(line.split()) > 15 and first_line:\n splits = line.split()\n xyz_str += f'{qcel.periodictable.to_E(splits[3])} {splits[0]} {splits[1]} {splits[2]}\\n'\n first_line = False\n if 'final structure:' in line:\n final_structure = True\n return str_to_xyz(xyz_str)\n\n log = ess_factory(fullpath=path, check_for_errors=False)\n try:\n coords, number, _ = log.load_geometry()\n except LogError:\n logger.debug(f'Could not parse xyz from {path}')\n\n # Try parsing Gaussian standard orientation instead of the input orientation parsed by Arkane.\n lines = _get_lines_from_file(path)\n for i in range(len(lines)):\n if 'Standard orientation:' in lines[i]:\n xyz_str = ''\n j = i\n while len(lines) and not lines[j].split()[0].isdigit():\n j += 1\n while len(lines) and '-------------------' not in lines[j]:\n splits = lines[j].split()\n xyz_str += f'{qcel.periodictable.to_E(int(splits[1]))} {splits[3]} {splits[4]} {splits[5]}\\n'\n j += 1\n break\n\n if xyz_str:\n return str_to_xyz(xyz_str)\n return None\n\n return xyz_from_data(coords=coords, numbers=number)",
"def loadGeoTransform(filepath):\n \n from osgeo import gdal\n \n ds = gdal.Open(filepath, 0)\n \n return ds.GetGeoTransform()",
"def write_to_kml(gps_df, output_path):\n coordinates = []\n for index, row in gps_df.iterrows():\n lat = (1 if row['Lat_dir'] == 'N' else -1) * (float(row['Lat'][0:2]) + (float(row['Lat'][2:]) / 60))\n long = (1 if row['Long_dir'] == 'E' else -1) * (float(row['Long'][0:3]) + (float(row['Long'][3:]) / 60))\n speed = row['Speed']\n coordinates.append((long, lat, speed))\n\n kml_file = kml.newlinestring(name='line', coords=coordinates)\n kml_file.linestyle.color = simplekml.Color.cyan\n kml_file.linestyle.width = 3\n kml_file.polystyle.color = simplekml.Color.cyan\n kml_file.altitudemode = simplekml.AltitudeMode.relativetoground\n kml_file.extrude = 1\n\n # stores all coordinates into the output file\n with open(output_path, \"w+\"):\n kml.save(output_path, format=True)",
"def read_szf_fmv_13(eps_file):\n data = {}\n metadata = {}\n\n n_lines = eps_file.mdr_counter\n n_node_per_line = eps_file.mdr[\"LONGITUDE_FULL\"].shape[1]\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n # extract metadata\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n metadata[\"state_vector_time\"] = datetime.strptime(\n eps_file.mphr[\"STATE_VECTOR_TIME\"][:-4], \"%Y%m%d%H%M%S\")\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n for f in fields:\n metadata[f] = np.int16(eps_file.mphr[f.upper()])\n\n # extract time\n dt = np.datetime64(\n \"2000-01-01\") + eps_file.mdr[\"UTC_LOCALISATION\"][\"day\"].astype(\n \"timedelta64[D]\"\n ) + eps_file.mdr[\"UTC_LOCALISATION\"][\"time\"].astype(\"timedelta64[ms]\")\n data[\"time\"] = dt[idx_nodes]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\", \"flagfield_rf1\", \"flagfield_rf2\", \"flagfield_pl\",\n \"flagfield_gen1\"\n ]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\"\n ]\n\n # 101 min = 6082 seconds\n # state_vector_time = ascending node crossing time - 1520.5,\n # time crossing at -90 lat\n orbit_start_time = metadata[\"state_vector_time\"] - timedelta(\n seconds=1520.5)\n orbit_end_time = orbit_start_time + timedelta(seconds=6082)\n\n data[\"orbit_nr\"] = np.ma.zeros(\n data[\"time\"].size, dtype=np.int32,\n fill_value=int32_nan) + metadata[\"orbit_start\"]\n data[\"orbit_nr\"][data[\"time\"] > orbit_end_time] += 1\n\n metadata[\"orbits\"] = {}\n for orbit_nr in np.unique(data[\"orbit_nr\"]):\n if orbit_nr == metadata[\"orbit_start\"]:\n metadata[\"orbits\"][orbit_nr] = (orbit_start_time, orbit_end_time)\n else:\n metadata[\"orbits\"][orbit_nr] = (orbit_end_time, orbit_end_time +\n timedelta(seconds=6082))\n\n # extract data\n for f in fields:\n if eps_file.mdr_sfactor[f.upper()] == 1:\n data[f] = eps_file.mdr[f.upper()].flatten()[idx_nodes]\n else:\n data[f] = (eps_file.mdr[f.upper()].flatten() * 1. /\n eps_file.mdr_sfactor[f.upper()])[idx_nodes]\n\n data[\"swath_indicator\"] = (data[\"beam_number\"].flatten() > 3).astype(\n np.uint8)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n fields = [(\"longitude_full\", long_nan), (\"latitude_full\", long_nan),\n (\"sigma0_full\", long_nan), (\"inc_angle_full\", uint_nan),\n (\"azi_angle_full\", int_nan), (\"flagfield\", int_nan)]\n\n for f, nan_val in fields:\n data[f] = eps_file.mdr[f.upper()].flatten()\n invalid = eps_file.mdr[f.upper()].flatten() == nan_val\n\n if eps_file.mdr_sfactor[f.upper()] != 1:\n data[f] = data[f] * 1. / eps_file.mdr_sfactor[f.upper()]\n\n data[f][invalid] = nan_val\n\n # modify longitudes from (0, 360) to (-180, 180)\n mask = np.logical_and(data[\"longitude_full\"] != long_nan,\n data[\"longitude_full\"] > 180)\n data[\"longitude_full\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n idx = (data[\"azi_angle_full\"] != int_nan) & (data[\"azi_angle_full\"] < 0)\n data[\"azi_angle_full\"][idx] += 360\n\n # set flags\n data[\"f_usable\"] = set_flags_fmv13(data[\"flagfield\"])\n\n return data, metadata",
"def kml_extract_RDD(xml_file):\n soup = BeautifulSoup(xml_file, \"lxml-xml\")\n return get_kml_content(soup)",
"def import_phase_map(self, file):\r\n filename, extension = os.path.splitext(file)\r\n flag = False\r\n if extension == '.txt':\r\n p = np.loadtxt(file, dtype=int, delimiter=' ')\r\n if np.shape(p) != self.size:\r\n return\r\n if np.shape(p) == (self.height, self.width):\r\n p = p.T\r\n m = np.zeros((self.width, self.height, 3), dtype=np.uint8)\r\n m[:, :, 0] = p\r\n m[:, :, 1] = p\r\n m[:, :, 2] = p\r\n self.maps[os.path.basename(filename)] = {'data': m}\r\n elif extension == '.fits':\r\n hdu = fits.open(file)\r\n p = fits.getdata(file)\r\n if np.shape(p) == self.size and np.max(p)<3:\r\n self.maps[os.path.basename(filename)] = {'map': p}\r\n flag = True\r\n elif np.shape(p) == self.size and np.max(p)>3:\r\n m = np.zeros((self.width, self.height, 3), dtype=np.uint8)\r\n m[:, :, 0] = p\r\n m[:, :, 1] = p\r\n m[:, :, 2] = p\r\n self.maps[os.path.basename(filename)] = {'data': m}\r\n else:\r\n print(\"Wrong dimensions \")\r\n elif extension == '.bmp':\r\n p = pygame.image.load(file)\r\n p = pygame.surfarray.array3d(p)\r\n self.maps[os.path.basename(filename)] = {'data': p}\r\n else:\r\n raise UserWarning('File extension %s not supported.' % extension)\r\n\r\n # if np.shape(self.maps[filename]) != (1024, 768, 3):\r\n # raise UserWarning('Wrong dimensions for SLM of size %i x %i (x %i)' % (self.width, self.height, 3))\r\n\r\n # self.save_maps()\r\n return filename, flag",
"def tsMap(self):\n mapplt = FermiMap()\n mapplt.savepath = self.workpath\n mapplt.image = self.outtsmap\n mapplt.figname = 'TSMAP.pdf'\n mapplt.cbarlabel = r'TS'\n mapplt.mapSky()\n if showSrc:\n srcs = self.getSrc()\n srcs = srcs[(srcs['Separation'] <= 3.) & ([not i.endswith('c') for i in srcs['Name']])]\n mapplt.srcSky(srcs['RA'], srcs['DEC'], srcs['Name'])\n mapplt.save()\n\n print(\"\\t=== Figure '{}' created ===\".format( os.path.join(mapplt.savepath, mapplt.figname) ))\n return",
"def show_kml_list():\n out = []\n\n for filename in os.listdir(settings.KML_OUTPUT_DIR):\n path = os.path.join(settings.KML_OUTPUT_DIR, filename)\n if os.path.isdir(path):\n continue\n f = open(path)\n content = f.read(300)\n f.close()\n name = KML_NAME_RE.search(content)\n if not name:\n continue\n out.append((name.group(1), filename))\n\n return {'items': sorted(out, cmp=lambda a, b: dumb_czech_cmp(a, b)), 'MEDIA_URL': settings.MEDIA_URL}",
"def to_oskar_telescope_model(self, filename):\n pass",
"def read_model(modelfile, dictlist):\n global dxdict\n global dxlist\n global import_img\n dxdict, dxlist = {}, [] # the list is needed for fixed ordering\n mod = io.open(modelfile, 'r')\n st = next(mod)\n ### image adress is found\n while 'SCHEME_IMAGE' not in st:\n st = next(mod)\n #image_adress = st.strip().split()[-1]\n #import_img = ImageTk.PhotoImage(Image.open(image_adress).resize((496, 384), Image.ANTIALIAS))\n #scheme.configure(image = import_img)\n ### the file must contain equations for ODE between ***STATES*** and ***END*** statements\n while \"***STATES***\" not in st:\n st = next(mod)\n #\n while \"***END***\" not in st:\n st = next(mod)\n try:\n dxdict[st.split('=')[0].strip()] = st.split('=')[1].strip().strip(';')\n dxlist.append(st.split('=')[0].strip())\n except:\n continue\n ## now, add dict names to the equations\n ## also, add state names to the PREDEFINED dict\n for s in dxdict.keys():\n for d in dictlist:\n keys = d + '.keys()'\n for k in eval(keys):\n dxdict[s] = dxdict[s].replace(k, \"%(d)s['%(k)s']\" % vars())\n ##\n for i in dxdict.keys():\n for j in dxdict.keys():\n if \"Xdict['%(j)s']\" % vars() not in dxdict[i]:\n dxdict[i] = dxdict[i].replace(j, \"Xdict['%(j)s']\" % vars())\n modelprint, nstates = os.path.basename(modelfile), len(dxlist)",
"def __init__(self, t_0, t_1):\n\n temp = ERA5Product('hourly','pressure', ['temperature'])\n z = ERA5Product('hourly','pressure', ['geopotential']) \n \n file_temp = temp.download(t_0, t_1)\n file_z = z.download(t_0, t_1)\n \n year = t_1.year\n month = f\"{t_1.month:02d}\"\n day = f\"{t_1.day:02d}\"\n hour = f\"{t_1.hour:02d}\" \n\n# file_temp = \"ERA5/reanalysis-era5-pressure-levels/reanalysis-era5-pressure-levels_2020030112_temperature.nc\"\n# file_z = \"ERA5/reanalysis-era5-pressure-levels/reanalysis-era5-pressure-levels_2020030112_geopotential.nc\"\n file_temp = ([\n \"ERA5/reanalysis-era5-pressure-levels/reanalysis-era5-pressure-levels_\"\n + str(year) + str(month) \n + str(day) + str(hour) + \"_\" + \"temperature\" + '.nc'])\n \n file_z = ([\n \"ERA5/reanalysis-era5-pressure-levels/reanalysis-era5-pressure-levels_\"\n + str(year) + str(month) \n + str(day) + str(hour) + \"_\" + \"geopotential\" + '.nc'])\n \n print (file_temp, file_z)\n self.t = temp.open(filename = file_temp[0]) \n self.z = z.open(filename = file_z[0])\n\n# flip latitude to be in ascending order \n self.t = self.t.sortby('latitude' , ascending = True) \n self.z = self.z.sortby('latitude' , ascending = True)",
"def time_calibration(input_file):\n original_path = os.getcwd()\n save_path = input_file['save_path']\n #change to save data reduction directory\n os.chdir(save_path)\n print '\\n Reading the list of images ....\\n'\n planet = input_file['exoplanet'] #set exoplanet name\n images = sorted(glob.glob('AB'+planet+'*.fits'))\n print images\n #include de RA,DEC and epoch of the exoplanet\n RA,DEC,epoch = input_file['RA'],input_file['DEC'],input_file['epoch']\n #obtain ST JD using iraf task and introduce in the header\n for i in range(len(images)):\n hdr = fits.getheader(images[i])\n if int(split(hdr['UT'],':')[0]) < int(hdr['timezone']):\n new_date = use.yesterday(hdr['date-obs'])\n #print images[i], new_date\n else:\n new_date = hdr['date-obs']\n year,month,day = split(new_date,'-')\n iraf.asttimes(year=year,month=month,day=day,time=hdr['loctime'],obs=input_file['observatory'])\n JD = iraf.asttimes.jd #obtain julian date\n LMST = iraf.asttimes.lmst #obtain the sideral time\n LMST = use.sexagesimal_format(LMST) #convert sideral time in sexagesimal format\n iraf.hedit(images[i],'ST',LMST,add='yes',verify='no',show='no',update='yes') #create the ST keyword in the header\n iraf.ccdhedit(images[i],'LMST',LMST,type='string') #include the mean sideral time in the header\n iraf.ccdhedit(images[i],'JD',JD,type='string') #include de julian date in the header\n #include RA, and DEC of the object in your header\n iraf.ccdhedit(images[i],\"RA\",RA,type=\"string\") #include right ascention in the header\n iraf.ccdhedit(images[i],\"DEC\",DEC,type=\"string\") #include declination in the header\n iraf.ccdhedit(images[i],\"epoch\",epoch,type=\"string\") #include epoch in the header\n # use.update_progress((i+1.)/len(images))\n print '\\n Setting airmass ....\\n'\n for i in range(len(images)):\n print '# ',images[i]\n #iraf.hedit(images[i],'airmass',airmass,add='yes')\n #iraf.hedit(images[i],'HJD',HJD,add='yes')\n iraf.setairmass.observatory = input_file['observatory']\n iraf.setairmass(images[i])\n iraf.setjd.time = 'ut'\n iraf.setjd(images[i])\n print '\\n.... done.\\n'\n #export information\n hjd, jd, airmass, st = [],[],[],[]\n for i in range(len(images)):\n hdr = fits.getheader(images[i])\n hjd.append(hdr['HJD'])\n jd.append(hdr['JD'])\n airmass.append(hdr['airmass'])\n st.append(hdr['st'])\n #saving the data\n data = DataFrame([list(hjd),list(jd),list(st),list(airmass)]).T\n data.columns = ['HJD','JD','ST','Airmass']\n data.to_csv('results_iraf_calibrations.csv')\n #change to workings directory\n os.chdir(original_path)\n return",
"def main(name, line1, line2, orbital_filename):\n #name = \"TERRA\"\n #line1 = \"1 25994U 99068A 16048.43680378 .00000258 00000-0 67198-4 0 9999\"\n #line2 = \"2 25994 98.1982 124.4247 0001352 105.3907 254.7441 14.57126067859938\"\n satellite = ephem.readtle(name, line1, line2)\n \n\n # Landsat 8\n #name = \"Landsat8\"\n #line1=\"1 39084U 13008A 16051.82349873 .00000188 00000-0 51829-4 0 9999\"\n #line2=\"2 39084 98.1988 123.2603 0001265 89.4360 270.6984 14.57110027160810\"\n #LD8 = ephem.readtle(name, line1, line2)\n \n\n sun = ephem.Sun()\n fov = np.radians(68.6)\n\n \"\"\"\n Make pandas dataframe to store swath information\n \"\"\"\n import pandas as pd\n data = {\"DateTime\": [],\"DOY\":[],\"Month\": [],\n \"orbit_id\":[], \"ground_lat\": [], \n \"ground_lon\": [], \"swath_width\": []}\n swaths = pd.DataFrame(data)\n swaths.set_index(keys=\"DateTime\")\n # generate shapefile\n\n orbit_id = 0\n # need to do splitted by hemisphere unfortunately..\n for orbit in make_an_orbit(satellite):\n #import pdb; pdb.set_trace()\n if len(orbit) > 1:\n \"\"\"\n So worth doing processing on orbit...\n\n \"\"\"\n sun = ephem.Sun()\n\n print(orbit[0].datetime)\n\n for overpass in orbit:\n overpass.only_daytime_overpasses(sun)\n overpass.derive_swath_width(fov)\n \"\"\"\n Create a tempoary dataframe for this orbit\n \"\"\"\n epoch = datetime.datetime(1970, 1, 1)\n #import pdb; pdb.set_trace()\n tmp_d = {\"DateTime\": [(o.datetime - epoch).total_seconds() for o in orbit],\n \"DOY\":[int(o.datetime.strftime('%j')) for o in orbit],\n \"Month\": [o.datetime.month for o in orbit],\n \"orbit_id\": orbit_id * np.ones(len(orbit)),\n \"ground_lat\": [o.lat for o in orbit],\n \"ground_lon\": [o.long for o in orbit],\n \"swath_width\": [o.swath_width for o in orbit]}\n tmp = pd.DataFrame(tmp_d)\n tmp.set_index(keys=\"DateTime\")\n #import pdb; pdb.set_trace()\n orbit_id +=1 \n \"\"\"\n Append to main dataframe\n \"\"\"\n swaths = swaths.append(tmp)\n #swaths.set_index(keys=\"DateTime\")\n\n \"\"\"\n Save the DataFrame to a file\n \"\"\"\n swaths = swaths.set_index(keys=\"DateTime\")\n #swaths.set_index(keys=\"DateTime\")\n #import pdb; pdb.set_trace()\n swaths.to_csv(orbital_filename, header=True)",
"def cmip6_renaming_dict():\n # I could probably simplify this with a generalized single dict, \n # which has every single possible `wrong` name and then for each model\n # the renaming function just goes through them...\n dim_name_dict = {\n \"AWI-CM-1-1-MR\":{},\n \"BCC-CSM2-MR\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"vertex\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"BCC-ESM1\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"vertex\": \"vertex\",\n 'time_bounds': \"time_bnds\",\n },\n \"CAMS-CSM1-0\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"vertex\": 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \"CanESM5\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"time_bounds\": \"time_bnds\",\n \"vertex\": \"vertices\",\n },\n \"CanESM5-CanOE\": {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n \"vertex\": \"vertices\",\n },\n \"CNRM-CM6-1\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\": \"axis_nbounds\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n 'vertex': \"nvertex\",\n 'time_bounds': \"time_bnds\",\n },\n \"CNRM-ESM2-1\": {\n \"x\": [\"x\", \"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n \"bnds\":\"axis_nbounds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"E3SM-1-0\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"E3SM-1-1\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"E3SM-1-1-ECA\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"EC-Earth3-LR\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"EC-Earth3-Veg\": {\n \"x\": 
[\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"EC-Earth3\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"FGOALS-f3-L\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"NICAM16-7S\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"MIROC-ES2L\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": [\"lev\", \"zlev\"],\n \"lev_bounds\": [\"lev_bnds\", \"zlev_bnds\"],\n \"lon_bounds\": \"x_bnds\",\n \"lat_bounds\": \"y_bnds\",\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"MIROC6\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"x_bnds\",\n \"lat_bounds\": \"y_bnds\",\n 'time_bounds': \"time_bnds\",\n },\n \"HadGEM3-GC31-LL\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"HadGEM3-GC31-MM\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"UKESM1-0-LL\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"time_bounds\":\"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n 'GISS-E2-2-G': { \n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-G-CC\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-G\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-H\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 
'time_bounds': \"time_bnds\",\n },\n \"CESM1-1-CAM5-CMIP5\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-WACCM\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-WACCM-FV2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":'d2',\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-FV2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":'d2',\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"GFDL-CM4\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"GFDL-OM4p5B\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"GFDL-ESM4\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"NESM3\": {\n \"x\": ['i', \"lon\"],\n \"y\": ['j', \"lat\"],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"MRI-ESM2-0\": {\n \"x\": ['x', \"lon\"],\n \"y\": ['y', \"lat\"],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":'bnds',\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": [\"x_bnds\", 'lon_bnds'],\n \"lat_bounds\": [\"y_bnds\", 'lat_bnds'],\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"SAM0-UNICON\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"MCM-UA-1-0\": {\n \"x\": 
\"longitude\",\n \"y\": \"latitude\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n }, \n 'IPSL-CM6A-LR': {\n \"x\": ['x', \"lon\"],\n \"y\": ['y', \"lat\"],\n \"lon\": 'nav_lon',\n \"lat\": 'nav_lat',\n \"lev\": [\"lev\",\"deptht\", \"olevel\"],\n \"lev_bounds\": [\"lev_bounds\", \"deptht_bounds\",'olevel_bounds'],\n \"lon_bounds\": \"bounds_nav_lon\",\n \"lat_bounds\": \"bounds_nav_lat\",\n 'vertex': 'nvertex',\n \"bnds\":\"axis_nbounds\",\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n 'NorCPM1': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM1-F': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM2-LM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM2-MM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\", # i leave this here because the names are the same as for the other Nor models.\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \n 'MPI-ESM1-2-HR': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'MPI-ESM1-2-LR': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'MPI-ESM-1-2-HAM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'CNRM-CM6-1-HR': {\n \"x\": \"x\",\n \"y\": \"y\",\n \"lon\": 'lon',\n \"lat\": 'lat',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n 'vertex': None,\n 'time_bounds': \"time_bounds\",\n },\n 'FIO-ESM-2-0': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'ACCESS-ESM1-5': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n 
\"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'ACCESS-CM2': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'INM-CM4-8': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n 'INM-CM5-0': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n 'MRI-ESM2-0':{\n \"x\": \"x\",\n \"y\": \"y\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n# \"lon_bounds\": 'x_bnds',\n# \"lat_bounds\": 'y_bnds',\n# 'vertex': None, # this is a mess. there is yet another convention. Will have to deal with this once I wrap xgcm into here.\n 'time_bounds': \"time_bnds\",\n },\n 'CIESM': { # this is a guess.\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n# \"lev\": \"lev\", # no 3d data available as of now\n# \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'KACE-1-0-G': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n# \"lev\": \"lev\", # no 3d data available as of now\n# \"lev_bounds\": \"lev_bnds\",\n# \"lon_bounds\": \"vertices_longitude\",\n# \"lat_bounds\": \"vertices_latitude\",\n# \"lon_bounds\": \"vertices_longitude\",\n# \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \n }\n # cast all str into lists\n for model in dim_name_dict.keys():\n for field in dim_name_dict[model].keys():\n if isinstance(dim_name_dict[model][field], str) or dim_name_dict[model][field] is None :\n dim_name_dict[model][field] = [dim_name_dict[model][field]]\n# add 'lon' and 'lat' as possible logical indicies for all models. This should take care of all regridded ocean output and all atmosphere models.\n if 'x' in dim_name_dict[model].keys():\n if not 'lon' in dim_name_dict[model]['x']:\n dim_name_dict[model]['x'].append('lon')\n \n if 'y' in dim_name_dict[model].keys():\n if not 'lat' in dim_name_dict[model]['y']:\n dim_name_dict[model]['y'].append('lat') \n return dim_name_dict",
"def convert_mtr_to_kittimot_format(data_list: List[Union[str, int, float]], frame_id: int) -> List[Union[str, int, float]]:\n annotation_list = []\n track_id = -1\n for data in data_list:\n annotation = [frame_id, -1]\n # print(\"type: \", str2id(bboxes['object_id']))\n object_type = data[0]\n truncated = -1\n occluded = -1\n alpha = -1\n bbox2d = [-1, -1, -1, -1]\n dimensions = data[1:4]\n location = data[4:7]\n rotation_y = data[7]\n\n annotation.append(object_type)\n annotation.append(truncated)\n annotation.append(occluded)\n annotation.append(alpha)\n annotation += bbox2d\n annotation += dimensions\n annotation += location\n annotation.append(rotation_y)\n annotation_list.append(annotation)\n return annotation_list\n\n\n\n \"\"\"\n convert KITTI MOTS format to AB3DMOT format\n\n \n @params:\n data_list: a list containing data in KITTI MOTs format\n \"\"\"",
"def read_satellite(filename, ftype):\n #ftype = 'l3c'\n #filename = '/gws/nopw/j04/cds_c3s_sst/output/v2.6.0/l3c/AVHRR19_G/2018/03/01/20180301120000-C3S-L3C_GHRSST-SSTskin-AVHRR19_G-ICDR2.0_day-v02.0-fv01.0.nc'\n #ftype = 'l4'\n #filename = '/gws/nopw/j04/cds_c3s_sst/public/data/ICDR_v2/Analysis/L4/v2.0/2018/01/01/20180101120000-C3S-L4_GHRSST-SSTdepth-OSTIA-GLOB_ICDR2.0-v02.0-fv01.0.nc'\n print \"Reading %s file: %s\" % (ftype, filename)\n \n # Read data - L4 or L3C (note L4 mask and L3C quality level have same array name)\n ncin = netCDF4.Dataset(filename)\n if ftype == 'l4':\n lon = ncin.variables['lon'][:]\n lat = ncin.variables['lat'][:]\n time_read = ncin.variables['time'][:]\n sst = ncin.variables['analysed_sst'][:]\n unc = ncin.variables['analysis_uncertainty'][:]\n sea_ice_frac = ncin.variables['sea_ice_fraction'][:]\n ql = ncin.variables['mask'][:]\n sstfill = ncin.variables['analysed_sst']._FillValue\n sstao = ncin.variables['analysed_sst'].add_offset\n sstsf = ncin.variables['analysed_sst'].scale_factor\n elif ftype == 'l3c':\n lon = ncin.variables['lon'][:]\n lat = ncin.variables['lat'][:]\n time_read = ncin.variables['time'][:]\n time_bnds = ncin.variables['time_bnds'][:]\n sst = ncin.variables['sea_surface_temperature'][:]\n sst_depth = ncin.variables['sea_surface_temperature_depth'][:]\n sst_dtime = ncin.variables['sst_dtime'][:]\n sst_depth_dtime = ncin.variables['sst_depth_dtime'][:]\n sses_bias = ncin.variables['sses_bias'][:]\n sses_sd = ncin.variables['sses_standard_deviation'][:]\n sst_depth_total_unc = ncin.variables['sst_depth_total_uncertainty'][:]\n l2p_flags = ncin.variables['l2p_flags'][:]\n ql = ncin.variables['quality_level'][:]\n wind_speed = ncin.variables['wind_speed'][:]\n large_scale_cor_unc = ncin.variables['large_scale_correlated_uncertainty'][:]\n synop_cor_unc = ncin.variables['synoptically_correlated_uncertainty'][:]\n uncor_unc = ncin.variables['uncorrelated_uncertainty'][:]\n adj_unc = ncin.variables['adjustment_uncertainty'][:]\n aerosol_dyn_ind = ncin.variables['aerosol_dynamic_indicator'][:]\n sens = ncin.variables['sensitivity'][:]\n tfill = ncin.variables['sst_dtime']._FillValue\n sstfill = ncin.variables['sea_surface_temperature']._FillValue\n sstao = ncin.variables['sea_surface_temperature'].add_offset\n sstsf = ncin.variables['sea_surface_temperature'].scale_factor\n else:\n print 'ftype not recognised or supported'\n \n # Create time field\n # -> If L4 then create a time field set to time in L4 file\n # -> Also add a time fill value to keep coding simple later on\n if ftype == 'l4':\n time = np.empty((7200,3600))\n time[:,:] = time_read\n tfill = -2147483648\n else:\n time = copy.deepcopy(sst_dtime) # Need to make a hard copy\n mask = sst_dtime.mask == False; mask = mask[0,:,:]\n row, col = np.where(mask==True)\n time.data[0, row, col] = time.data[0,row, col] + time_read\n \n # Create output structure\n if ftype == 'l4':\n data = dict(lon=lon,\n lat=lat,\n time_read=time_read,\n time=time,\n sst=sst,\n unc=unc,\n sea_ice_frac=sea_ice_frac,\n ql=ql,\n tfill=tfill,\n sstfill=sstfill,\n sstao=sstao,\n sstsf=sstsf)\n elif ftype == 'l3c':\n data = dict(lon=lon,\n lat=lat,\n time_read=time_read,\n time=time,\n time_bnds=time_bnds,\n sst=sst,\n sst_depth=sst_depth,\n sst_dtime=sst_dtime,\n sst_depth_dtime=sst_depth_dtime,\n sses_bias=sses_bias,\n sses_sd=sses_sd,\n sst_depth_total_unc=sst_depth_total_unc,\n l2p_flags=l2p_flags,\n ql=ql,\n wind_speed=wind_speed,\n large_scale_cor_unc=large_scale_cor_unc,\n synop_cor_unc=synop_cor_unc,\n 
uncor_unc=uncor_unc,\n adj_unc=adj_unc,\n aerosol_dyn_ind=aerosol_dyn_ind,\n sens=sens,\n tfill=tfill,\n sstfill=sstfill,\n sstao=sstao,\n sstsf=sstsf)\n else:\n print 'ftype not recognised or supported'\n \n return data",
"def kaldi2dag(self, file_path):\n raise NotImplementedError",
"def nc2gtiff(input_name, epsg_in=3413, epsg_out=3857):\n sub_name=[\"dX\",\"dY\"]\n \n for sub in sub_name:\n result = os.system(\"echo ''\")\n #netCDF to geotiff (EPSG code is not changed) \n out_name = \"./data/tiff_raw/{}_{}_tmp.tiff\".format(os.path.basename(input_name)[:-3],sub)\n #c is shell command\n #options\n #-a_srs:EPSGcode of input files\n #-of:output format\n #\n print(\"nc2gtiff\")\n c = \"gdal_translate -a_srs '+proj=stere +a=6378273 +b=6356889.44891 +lat_0=90 +lat_ts=70 +lon_0=-45' NETCDF:'{in_name}':{sub} \\\n -of 'Gtiff' \\\n '{out_name}'\".format(epsg=epsg_in,sub=sub, \\\n in_name=input_name,out_name=out_name) \n result = os.system(c) #run shell command\n #print(c) #if you want to see the filled c, remove '#' in the head of this line\n if result!=0: # if it raises error, return filename \n print(input_name, result, \"translate\")\n \n result = os.system(\"echo ''\")\n\n #geotiff EPSG_in to EPCG_out\n print(\"geotiff2geotiff\")\n target_name=\"./data/tiff_target/{}_{}.tiff\".format(os.path.basename(input_name)[:-3],sub)\n c = \"gdalwarp -overwrite -s_srs '+proj=stere +a=6378273 +b=6356889.44891 +lat_0=90 +lat_ts=70 +lon_0=-45' {out_name} -r cubic\\\n {target_name} -t_srs EPSG:{epsg_out} -of \\\n 'GTIFF'\".format(out_name=out_name, target_name=target_name, \n epsg_in=epsg_in, epsg_out=epsg_out)\n result = os.system(c) # run shell command\n if result!=0:\n print(input_name, result, \"warp\")",
"def open_igra_metadata(filename):\n import pandas as pd\n infos = \"\"\"\n IGRAID 1- 11 Character\n WMOID 13- 17 Integer\n NAME 19- 48 Character\n NAMFLAG 50- 50 Character\n LATITUDE 52- 60 Real\n LATFLAG 62- 62 Character\n LONGITUDE 64- 72 Real\n LONFLAG 74- 74 Character\n ELEVATION 76- 81 Real\n ELVFLAG 83- 83 Character\n YEAR 85- 88 Integer\n MONTH 90- 91 Integer\n DAY 93- 94 Integer\n HOUR 96- 97 Integer\n DATEIND 99- 99 Integer\n EVENT 101-119 Character\n ALTIND 121-122 Character\n BEFINFO 124-163 Character\n BEFFLAG 164-164 Character\n LINK 166-167 Character\n AFTINFO 169-208 Character\n AFTFLAG 209-209 Character\n REFERENCE 211-235 Character\n COMMENT 236-315 Character\n UPDCOM 316-346 Character\n UPDDATE 348-354 Character\n \"\"\"\n import numpy as np\n colspecs = []\n header = []\n types = {}\n for iline in infos.splitlines():\n if iline == '':\n continue\n ih = iline[0:11].strip().lower()\n header.append(ih)\n ii = int(iline[13:16]) - 1\n ij = int(iline[17:20])\n colspecs.append((ii, ij))\n it = iline[22:].strip()\n if it == 'Character':\n it = 'str'\n elif it == 'Real':\n it = 'float'\n else:\n it = 'int'\n types[ih] = it\n\n data = pd.read_fwf(filename, colspecs=colspecs, header=None, dtype=types, names=header)\n data = data.replace('nan', '')\n data['date'] = pd.to_datetime((data.year * 1000000 +\n np.where(data.month.values == 99, 6, data.month.values) * 10000 +\n np.where(data.day.values == 99, 15, data.day.values) * 100 +\n np.where(data.hour.values == 99, 0, data.hour.values)).apply(str), format='%Y%m%d%H')\n return data",
"def get_kml_dict(self,name,filename):\n\n lon1,lon2,lat1,lat2=self.get_bounds()\n d={'lat1':lat1,'lat2':lat2,'lon1':lon1,'lon2':lon2, \\\n 'name':name,'filename':filename,'time':self.get_time()}\n return d",
"def loadGMSHModel(modelfile, scale, dx=0.0, dy=0.0, dz=0.0, avg=True,\n neg_normal=False, texture=None):\n\n # noinspection PyPep8Naming,PyUnboundLocalVariable,PyShadowingNames,PyUnusedLocal\n def load(gmshfile, scale, dx, dy, dz):\n \"\"\"Carga un archivo gmsh y retorna 3 listas, una lista de vertices, otra de normales y otra de normales promedio. \\n\n Toma como argumento el archivo, una escala y la posicion (dx,dy,dz)\"\"\"\n\n # noinspection PyPep8Naming,PyShadowingNames\n def getAveNormals(nodes, elems):\n \"\"\"Calcula las normales promedio por cada vertice\"\"\"\n nodetrilist = []\n for nodenum in range(len(nodes)):\n nodetrilist.append([])\n for elemnum in range(len(elems)):\n if nodenum in elems[elemnum]:\n nodetrilist[nodenum].append(elemnum)\n avenorms = []\n for tri in nodetrilist:\n aveNi = 0.0\n aveNj = 0.0\n aveNk = 0.0\n denom = max(float(len(tri)), 1)\n for elem in tri:\n vert1 = [nodes[elems[elem][0]][0], nodes[elems[elem][0]][1],\n nodes[elems[elem][0]][2]]\n vert2 = [nodes[elems[elem][1]][0], nodes[elems[elem][1]][1],\n nodes[elems[elem][1]][2]]\n vert3 = [nodes[elems[elem][2]][0], nodes[elems[elem][2]][1],\n nodes[elems[elem][2]][2]]\n normals = getNormals(vert1, vert2, vert3)\n aveNi += normals[0]\n aveNj += normals[1]\n aveNk += normals[2]\n avenorms.append([aveNi / denom, aveNj / denom, aveNk / denom])\n return avenorms\n\n # noinspection PyPep8Naming\n def getNormals(vertA, vertB, vertC):\n \"\"\"Calcula las normales por cada 3 vertices\"\"\"\n xA = vertA[0]\n xB = vertB[0]\n xC = vertC[0]\n yA = vertA[1]\n yB = vertB[1]\n yC = vertC[1]\n zA = vertA[2]\n zB = vertB[2]\n zC = vertC[2]\n ABx = xB - xA\n ABy = yB - yA\n ABz = zB - zA\n BCx = xC - xB\n BCy = yC - yB\n BCz = zC - zB\n Nx = ABy * BCz - ABz * BCy\n Ny = ABz * BCx - ABx * BCz\n Nz = ABx * BCy - ABy * BCx\n VecMag = math.sqrt(Nx ** 2 + Ny ** 2 + Nz ** 2)\n Ni = Nx / VecMag\n Nj = Ny / VecMag\n Nk = Nz / VecMag\n return [Ni, Nj, Nk]\n\n # Lee el archivo\n try:\n infile = open(gmshfile)\n except:\n raise Exception(\"el archivo del modelo no existe\")\n\n # Crea el modeo\n try:\n gmshlines = infile.readlines()\n readnodes = False\n readelems = False\n skipline = 0\n elems = []\n lnum = 0\n nnodes = 0\n for line in gmshlines:\n if \"$Nodes\" in line:\n readnodes = True\n skipline = 2\n nnodes = int(gmshlines[lnum + 1].strip())\n nodes = []\n for i in range(nnodes):\n nodes.append(99999.9)\n elif \"$EndNodes\" in line:\n readnodes = False\n skipline = 1\n elif \"$Elements\" in line:\n readelems = True\n skipline = 2\n elif \"$EndElements\" in line:\n readelems = False\n skipline = 1\n if skipline < 1:\n if readnodes:\n nXYZ = line.strip().split()\n nodenum = int(nXYZ[0]) - 1\n nX = float(nXYZ[1]) * scale + dx\n nY = float(nXYZ[2]) * scale + dy\n nZ = float(nXYZ[3]) * scale + dz\n if neg_normal:\n nZ *= -1\n nodes[nodenum] = [nX, nY, nZ]\n elif readelems:\n n123 = line.split()\n if n123[1] == \"2\":\n n1 = int(n123[-3]) - 1\n n2 = int(n123[-1]) - 1\n n3 = int(n123[-2]) - 1\n elems.append([n1, n2, n3])\n else:\n skipline -= 1\n lnum += 1\n triarray = []\n normarray = []\n avenorms = []\n nodeavenorms = getAveNormals(nodes, elems)\n for elem in elems:\n vert1 = [nodes[elem[0]][0], nodes[elem[0]][1],\n nodes[elem[0]][2]]\n vert2 = [nodes[elem[1]][0], nodes[elem[1]][1],\n nodes[elem[1]][2]]\n vert3 = [nodes[elem[2]][0], nodes[elem[2]][1],\n nodes[elem[2]][2]]\n avenorm0 = nodeavenorms[elem[0]]\n avenorm1 = nodeavenorms[elem[1]]\n avenorm2 = nodeavenorms[elem[2]]\n normals = getNormals(vert1, vert2, vert3)\n 
triarray.append(vert1)\n triarray.append(vert2)\n triarray.append(vert3)\n normarray.append(normals)\n normarray.append(normals)\n normarray.append(normals)\n avenorms.append(avenorm0)\n avenorms.append(avenorm1)\n avenorms.append(avenorm2)\n return triarray, normarray, avenorms\n\n except:\n raise Exception(\"error al cargar el modelo\")\n\n vertex, norm, avgnorm = load(modelfile, scale, float(dx), float(dy),\n float(dz))\n if avg:\n return VboObject(vbo.VBO(array(vertex, 'f')),\n vbo.VBO(array(avgnorm, 'f')), len(vertex), texture)\n else:\n return VboObject(vbo.VBO(array(vertex, 'f')), vbo.VBO(array(norm, 'f')),\n len(vertex), texture)",
"def main():\n parser = argparse.ArgumentParser(description=\"Align ORB-SLAM results with ground truth according to camera orientation in AirSim.\")\n parser.add_argument(\"filename\", help = \"Trajectory in TUM format.\")\n parser.add_argument(\"output\", help = \"Output file.\")\n \n parser.add_argument(\"roll\", help=\"Camera Roll.\")\n parser.add_argument(\"pitch\", help=\"Camera Pitch.\")\n parser.add_argument(\"yaw\", help=\"Camera Yaw.\")\n\n args = parser.parse_args()\n\n roll = float(args.roll)*m.pi/180\n pitch = float(args.pitch)*m.pi/180\n yaw = float(args.yaw)*m.pi/180\n\n file = open(args.filename, \"r\")\n newFile = open(args.output, \"w\")\n \n for line in file:\n values = line.split()\n x = float(values[3])\n y = float(values[1])\n z = float(values[2])\n position = np.array([[x],[y],[z]])\n position = Rx(roll) @ Ry(pitch) @ Rz(yaw) @ position\n\n newFile.write(\"%s %s %s %s %s %s %s %s\\n\" %(values[0], position[0,0], position[1,0], position[2,0], values[4], values[5], values[6], values[7]))\n\n file.close\n newFile.close\n print(\"Saved as \" + args.output)\n\n return",
"def export_alembic(self, path, geo_nodes, use_local_space=False):\n if os.path.exists(path):\n raise RuntimeError('Given path aleady exist: {}'.format(path))\n\n export_space = '' if use_local_space else '-worldSpace'\n args = [\n '-uv',\n export_space,\n '-frameRange', str(self._model.frame_in - 1),\n str(self._model.frame_out + 1),\n '-frameRelativeSample', str(self._model.motion_blur_in),\n '-frameRelativeSample', '0',\n '-frameRelativeSample', str(self._model.motion_blur_out),\n '-file', path,\n ]\n for node in geo_nodes:\n if mc.nodeType(node) != 'transform':\n node = mc.listRelatives(node, parent=True, fullPath=True)[0]\n args.extend([\n '-root', node\n ])\n\n mc.AbcExport(jobArg=[' '.join(args)])",
"def vtkReformat(inPath, outPath):\n # Get size of map\n inFile = open(inPath,\"rb\")\n lineList = inFile.readlines()\n for line in lineList:\n if line.lower().strip().startswith(\"dimensions\"):\n size = map(int,line.split(\" \")[1:dimension+1])\n break\n inFile.close()\n\n if dimension == 2: size += [0]\n\n outFile = open(outPath,\"wb\")\n for (i,line) in enumerate(lineList):\n if i == 1:\n newline = line.lstrip(line.rstrip(\"\\n\"))\n line = \"lddmm 8 0 0 {0} {0} 0 0 {1} {1} 0 0 {2} {2}\".format(size[2]-1, size[1]-1, size[0]-1) + newline\n outFile.write(line)",
"def read(self, url: str):\n\n log.info(f\"Downloading KMZ file {basename(url)}\")\n kml = self.fetch(url)\n\n log.info(\"Parsing KML data\")\n self.iter_elems = iterparse(BytesIO(kml), events=(\"start\", \"end\"), resolve_entities=False)\n\n prod_items = {\n \"issuer\": \"Issuer\",\n \"product_id\": \"ProductID\",\n \"generating_process\": \"GeneratingProcess\",\n \"issue_time\": \"IssueTime\",\n }\n\n nsmap = None\n\n # Get Basic Metadata\n prod_definition = None\n prod_definition_tag = None\n for event, element in self.iter_elems:\n if event == \"start\":\n # get namespaces from root element\n if nsmap is None:\n nsmap = element.nsmap\n prod_definition_tag = f\"{{{nsmap['dwd']}}}ProductDefinition\"\n elif event == \"end\":\n if element.tag == prod_definition_tag:\n prod_definition = element\n # stop processing after head\n # leave forecast data for iteration\n break\n\n self.metadata = {k: prod_definition.find(f\"{{{nsmap['dwd']}}}{v}\").text for k, v in prod_items.items()}\n self.metadata[\"issue_time\"] = dt.datetime.fromisoformat(self.metadata[\"issue_time\"])\n\n # Get time steps.\n timesteps = prod_definition.findall(\n \"dwd:ForecastTimeSteps\",\n nsmap,\n )[0]\n self.timesteps = [dt.datetime.fromisoformat(i.text) for i in timesteps.getchildren()]\n\n # save namespace map for later iteration\n self.nsmap = nsmap",
"def reproject_vector( path, epsg_from=None, epsg_to=None):\n\n if not epsg_to: raise Exception(\"please, specify the output EPSG codes\")\n\n inDataSet = None\n outDataSet = None\n inFeature = None\n outFeature = None\n outLayer = None\n\n try:\n\n driver = ogr.GetDriverByName('ESRI Shapefile')\n inDataSet = driver.Open(path, 0) # 0 means read-only\n\n # define input SpatialReference\n if not epsg_from:\n layer = inDataSet.GetLayer()\n inSpatialRef = layer.GetSpatialRef()\n else:\n inSpatialRef = osr.SpatialReference()\n inSpatialRef.ImportFromEPSG(epsg_from)\n\n # define output SpatialReference\n outSpatialRef = osr.SpatialReference()\n outSpatialRef.ImportFromEPSG(epsg_to)\n\n # create the CoordinateTransformation\n coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)\n\n # get the first input layer and the geometry type\n inLayer = inDataSet.GetLayer()\n geotype = inLayer.GetGeomType()\n lname = inLayer.GetName()\n\n drv = ogr.GetDriverByName(\"ESRI Shapefile\")\n outDataSet = drv.CreateDataSource(\"/vsimem/memory.shp\")\n\n outLayer = outDataSet.CreateLayer(lname, srs=outSpatialRef, geom_type=geotype)\n\n # add fields\n inLayerDefn = inLayer.GetLayerDefn()\n\n for i in range(0, inLayerDefn.GetFieldCount()):\n fieldDefn = inLayerDefn.GetFieldDefn(i)\n outLayer.CreateField(fieldDefn)\n\n # get the output layer\"s feature definition\n outLayerDefn = outLayer.GetLayerDefn()\n\n counter = 1\n\n # loop through the input features\n inFeature = inLayer.GetNextFeature()\n while inFeature:\n # get the input geometry\n geom = inFeature.GetGeometryRef()\n # reproject the geometry\n geom.Transform(coordTrans)\n # create a new feature\n outFeature = ogr.Feature(outLayerDefn)\n # set the geometry and attribute\n outFeature.SetGeometry(geom)\n for i in range(0, outLayerDefn.GetFieldCount()):\n outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))\n # add the feature to the shapefile\n outLayer.CreateFeature(outFeature)\n\n # destroy the features and get the next input feature\n if outFeature: outFeature = None\n inFeature = inLayer.GetNextFeature()\n\n counter += 1\n #print(counter)\n\n return outDataSet\n\n except RuntimeError as err:\n raise err\n except Exception as e:\n raise e\n\n finally:\n if inDataSet: outDataSet == None # give back control to C++\n if outDataSet: outDataSet == None\n if outLayer: outLayer == None\n if inFeature: inFeature == None\n if outFeature: outFeature = None",
"def airports(osm_path): \n return (retrieve(osm_path,'multipolygons',['aeroway'],**{'aeroway':[\"='aerodrome'\"]})).rename(columns={'aeroway': 'asset'})",
"def from_gmsh_file(filename):\n\n from pyparsing import Word, Optional, nums, Combine, Literal, \\\n CaselessLiteral, Group, OneOrMore, StringEnd, restOfLine, \\\n ParseException, alphanums, Keyword, ZeroOrMore\n\n e = CaselessLiteral(\"E\")\n inum = Word(\"+-\"+nums)\n fnum = Combine(\n Word( \"+-\"+nums, nums ) + Optional(\".\"+Optional(Word(nums))) +\n Optional(e+Word(\"+-\"+nums,nums))\n )\n\n semi = Literal(\";\").suppress()\n colon = Literal(\",\").suppress()\n lpar = Literal(\"(\").suppress()\n rpar = Literal(\")\").suppress()\n lbrace = Literal(\"{\").suppress()\n rbrace = Literal(\"}\").suppress()\n eq = Literal(\"=\").suppress()\n\n point = Group(\n Keyword(\"Point\")+lpar+inum+rpar+eq+\n Group(lbrace+fnum+colon+fnum+colon+fnum+colon+fnum+rbrace)+semi\n )\n line = Group(\n Keyword(\"Line\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+colon+inum+rbrace)+semi\n )\n lineloop = Group(\n Keyword(\"Line Loop\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+OneOrMore(colon+inum)+rbrace)+semi\n )\n circle = Group(\n Keyword(\"Circle\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+colon+inum+colon+inum+rbrace)+semi\n )\n planesurface = Group(\n Keyword(\"Plane Surface\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+rbrace)+semi\n )\n ruledsurface = Group(\n Keyword(\"Ruled Surface\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+rbrace)+semi\n )\n surfaceloop = Group(\n Keyword(\"Surface Loop\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+OneOrMore(colon+inum)+rbrace)+semi\n )\n volume = Group(\n Keyword(\"Volume\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+rbrace)+semi\n )\n physicalsurface = Group(\n Keyword(\"Physical Surface\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+ZeroOrMore(colon+inum)+rbrace)+semi\n )\n physicalvolume = Group(\n Keyword(\"Physical Volume\")+lpar+inum+rpar+eq+\n Group(lbrace+inum+ZeroOrMore(colon+inum)+rbrace)+semi\n )\n skip1 = Group(\n Word(alphanums)+eq+fnum+semi\n )\n\n comment = Group( Literal(\"//\")+restOfLine).suppress()\n\n command = point | line | lineloop | circle | planesurface | ruledsurface | \\\n surfaceloop | volume | physicalsurface | physicalvolume | comment \\\n | skip1\n\n grammar= OneOrMore(command)+StringEnd()\n\n try:\n tokens= grammar.parseFile(filename)\n except ParseException as err:\n print(err.line)\n print(\" \"*(err.column-1) + \"^\")\n print(err)\n raise err\n\n lineloops={}\n surfaceloops={}\n geo=geometry()\n for x in tokens:\n if x[0]==\"Point\":\n geo.addpoint(int(x[1]),[float(x[2][0]),float(x[2][1]),float(x[2][2])])\n elif x[0]==\"Line\":\n assert len(x[2])==2\n geo.addline(int(x[1]),[int(x[2][0]),int(x[2][1])])\n elif x[0]==\"Circle\":\n assert len(x[2])==3\n geo.addline(int(x[1]),[int(x[2][0]),int(x[2][2])])\n #geo.add1(geom.circle(int(x[1]),int(x[2][0]),int(x[2][1]),\n # int(x[2][2])))\n elif x[0]==\"Line Loop\":\n lineloops[int(x[1])]=[int(y) for y in x[2]]\n elif x[0]==\"Plane Surface\":\n assert len(x[2])==1\n geo.addsurface(int(x[1]),lineloops[int(x[2][0])])\n elif x[0]==\"Ruled Surface\":\n assert len(x[2])==1\n geo.addsurface(int(x[1]),lineloops[int(x[2][0])])\n elif x[0]==\"Surface Loop\":\n surfaceloops[int(x[1])]=[int(y) for y in x[2]]\n elif x[0]==\"Volume\":\n assert len(x[2])==1\n geo.addvolume(int(x[1]),surfaceloops[int(x[2][0])])\n elif x[0]==\"Physical Surface\":\n geo.addphysicalsurface(int(x[1]),[int(y) for y in x[2]])\n elif x[0]==\"Physical Volume\":\n geo.addphysicalvolume(int(x[1]),[int(y) for y in x[2]])\n else:\n raise \"Unsupported entity: \"+x[0]\n\n return geo",
"def gen_landmark_data(src_txt_path, net, augmet=False):\r\n print(\">>>>>> Start landmark data create...Stage: %s\" % net)\r\n save_folder = os.path.join(root_path, '../DATA/12/')\r\n save_image_folder = os.path.join(save_folder, 'train_%s_landmark_aug' % net)\r\n size_of_net = {'PNet': 12, 'RNet': 24, 'ONet': 48}\r\n if net not in size_of_net:\r\n raise Exception(\"The net type error!\")\r\n if not os.path.isdir(save_image_folder):\r\n os.makedirs(save_image_folder)\r\n print('create folder: ', save_image_folder)\r\n save_f = open(os.path.join(save_folder, 'landmark_%s_aug.txt' % size_of_net[net]), 'w')\r\n image_count = 0\r\n # image_path bbox landmark(5*2)\r\n bbox_landmark_info = get_bbox_landmark_from_txt(src_txt_path, data_path='../DATA/landmarks_traindata', with_landmark=True)\r\n for img_path, bbox, landmark_gt in bbox_landmark_info:\r\n f_imgs = list()\r\n f_landmarks = list()\r\n img = cv2.imread(img_path)\r\n assert(img is not None)\r\n img_h, img_w, img_c = img.shape\r\n gt_box = np.array([bbox.left, bbox.top, bbox.right, bbox.bottom])\r\n f_face = img[bbox.top: bbox.bottom+1, bbox.left: bbox.right+1]\r\n f_face = cv2.resize(f_face, (size_of_net[net], size_of_net[net]))\r\n landmark = np.zeros((5, 2))\r\n # normalize\r\n for index, one in enumerate(landmark_gt):\r\n rv = ((one[0]-gt_box[0])/(gt_box[2]-gt_box[0]), (one[1]-gt_box[1])/(gt_box[3]-gt_box[1]))\r\n landmark[index] = rv\r\n f_imgs.append(f_face)\r\n f_landmarks.append(landmark.reshape(10))\r\n landmark = np.zeros((5, 2))\r\n if augmet:\r\n x1, y1, x2, y2 = gt_box\r\n gt_width = x2 - x1 + 1\r\n gt_height = y2 - y1 + 1\r\n if max(gt_width, gt_height) < 40 or x1 < 0 or y1 < 0:\r\n continue\r\n # random shift\r\n for i in range(10):\r\n bbox_size = np.random.randint(int(min(gt_width, gt_height) * 0.8), np.ceil(1.25 * max(gt_width, gt_height)))\r\n # delta_x and delta_y are offsets of (x1, y1)\r\n # max can make sure if the delta is a negative number , x1+delta_x >0\r\n # parameter high of randint make sure there will be intersection between bbox and cropped_box\r\n delta_x = np.random.randint(-gt_width*0.2, gt_width*0.2)\r\n delta_y = np.random.randint(-gt_height*0.2, gt_height*0.2)\r\n nx1 = int(max(x1+gt_width/2 - bbox_size/2 + delta_x, 0))\r\n ny1 = int(max(y1+gt_height/2 - bbox_size/2 + delta_y, 0))\r\n nx2 = nx1 + bbox_size\r\n ny2 = ny1 + bbox_size\r\n if nx2 > img_w or ny2 > img_h:\r\n continue\r\n # print(nx1, ny1, nx2, ny2)\r\n crop_box = np.array([nx1, ny1, nx2, ny2])\r\n cropped_img = img[ny1: ny2+1, nx1: nx2+1, :]\r\n resized_img = cv2.resize(cropped_img, (size_of_net[net], size_of_net[net]))\r\n iou = calc_iou(crop_box, np.expand_dims(gt_box, 0))\r\n if iou <= 0.65:\r\n continue\r\n f_imgs.append(resized_img)\r\n # normalize\r\n for index, one in enumerate(landmark_gt):\r\n rv = ((one[0]-nx1)/bbox_size, (one[1]-ny1/bbox_size))\r\n landmark[index] = rv\r\n f_landmarks.append(landmark.reshape(10))\r\n landmark = np.zeros((5, 2))\r\n # get last landmark from list\r\n landmark_ = f_landmarks[-1].reshape((-1, 2))\r\n bbox = BBox([nx1, ny1, nx2, ny2])\r\n\r\n # mirror\r\n if random.choice([0, 1]) > 0:\r\n face_flipped, landmark_flipped = flip(resized_img, landmark_)\r\n face_flipped = cv2.resize(face_flipped, (size_of_net[net], size_of_net[net]))\r\n # c*h*w\r\n f_imgs.append(face_flipped)\r\n f_landmarks.append(landmark_flipped.reshape(10))\r\n # rotate\r\n if random.choice([0, 1]) > 0:\r\n face_rotated_by_alpha, landmark_rotated = rotate(img, bbox,\r\n bbox.reproject_landmark(landmark_), 5)\r\n # landmark 
offset\r\n landmark_rotated = bbox.project_landmark(landmark_rotated)\r\n face_rotated_by_alpha = cv2.resize(face_rotated_by_alpha, (size_of_net[net], size_of_net[net]))\r\n f_imgs.append(face_rotated_by_alpha)\r\n f_landmarks.append(landmark_rotated.reshape(10))\r\n\r\n # flip\r\n face_flipped, landmark_flipped = flip(face_rotated_by_alpha, landmark_rotated)\r\n face_flipped = cv2.resize(face_flipped, (size_of_net[net], size_of_net[net]))\r\n f_imgs.append(face_flipped)\r\n f_landmarks.append(landmark_flipped.reshape(10))\r\n # anti-clockwise rotation\r\n if random.choice([0, 1]) > 0:\r\n face_rotated_by_alpha, landmark_rotated = rotate(img, bbox, bbox.reproject_landmark(landmark_), -5)\r\n landmark_rotated = bbox.project_landmark(landmark_rotated)\r\n face_rotated_by_alpha = cv2.resize(face_rotated_by_alpha, (size_of_net[net], size_of_net[net]))\r\n f_imgs.append(face_rotated_by_alpha)\r\n f_landmarks.append(landmark_rotated.reshape(10))\r\n\r\n face_flipped, landmark_flipped = flip(face_rotated_by_alpha, landmark_rotated)\r\n face_flipped = cv2.resize(face_flipped, (size_of_net[net], size_of_net[net]))\r\n f_imgs.append(face_flipped)\r\n f_landmarks.append(landmark_flipped.reshape(10))\r\n f_imgs, f_landmarks = np.asarray(f_imgs), np.asarray(f_landmarks)\r\n for i in range(len(f_imgs)):\r\n # if np.sum(np.where(f_landmarks[i] <= 0, 1, 0)) > 0:\r\n # print('skip image: %d' % i)\r\n # print(f_landmarks[i])\r\n # continue\r\n # if np.sum(np.where(f_landmarks[i] >= 1, 1, 0)) > 0:\r\n # print('skip image: %d', i)\r\n # print(f_landmarks[i])\r\n # continue\r\n path = os.path.join(save_image_folder, '%d.jpg' % image_count)\r\n cv2.imwrite(path, f_imgs[i])\r\n landmarks = map(str, list(f_landmarks[i]))\r\n save_f.write(path + ' -2 ' + ' '.join(landmarks) + '\\n')\r\n image_count += 1\r\n print_str = \"\\rCount: {}\".format(image_count)\r\n sys.stdout.write(print_str)\r\n sys.stdout.flush()\r\n save_f.close()\r\n print('\\n Landmark create done!')",
"def get_wmt_enfr_train_set(path):\n filename = \"training-giga-fren.tar\"\n maybe_download_and_extract(filename, path, _WMT_ENFR_TRAIN_URL, extract=True)\n train_path = os.path.join(path, \"giga-fren.release2.fixed\")\n gunzip_file(train_path + \".fr.gz\", train_path + \".fr\")\n gunzip_file(train_path + \".en.gz\", train_path + \".en\")\n return train_path"
] | [
"0.726774",
"0.66690755",
"0.66451776",
"0.6530712",
"0.6462909",
"0.62168556",
"0.5969086",
"0.5435771",
"0.53939426",
"0.5367328",
"0.53303623",
"0.53256774",
"0.5312925",
"0.5307262",
"0.52901286",
"0.5280847",
"0.52789676",
"0.5241874",
"0.5188435",
"0.51814985",
"0.51693153",
"0.5149364",
"0.5133601",
"0.5128472",
"0.5108604",
"0.5093224",
"0.509204",
"0.50789005",
"0.5078186",
"0.5067663",
"0.50421655",
"0.5040157",
"0.5031818",
"0.5030836",
"0.5026152",
"0.4995626",
"0.4994394",
"0.49935648",
"0.49886173",
"0.4958442",
"0.49490562",
"0.494407",
"0.4940474",
"0.49310654",
"0.49200654",
"0.49130267",
"0.4911265",
"0.49099395",
"0.49033377",
"0.48931974",
"0.48873875",
"0.48817492",
"0.48803428",
"0.48731956",
"0.48691323",
"0.48686522",
"0.48662922",
"0.48660344",
"0.48635423",
"0.48559663",
"0.4855817",
"0.48484418",
"0.4844975",
"0.4840121",
"0.48386148",
"0.48354483",
"0.4828225",
"0.48115787",
"0.48040688",
"0.47955787",
"0.4789019",
"0.47868806",
"0.4782446",
"0.47815943",
"0.47804502",
"0.47768605",
"0.47752404",
"0.47536516",
"0.4747668",
"0.47377217",
"0.47371435",
"0.4736622",
"0.47358853",
"0.47333756",
"0.47257808",
"0.47240785",
"0.47218502",
"0.47204623",
"0.47163594",
"0.47061825",
"0.47005016",
"0.46991402",
"0.46990442",
"0.46968696",
"0.4696218",
"0.46886107",
"0.46757004",
"0.4667495",
"0.46562108",
"0.4652185"
] | 0.6455014 | 5 |
converts kml files to open airspace files | def kml_2_open_airspace_and_json_format(self, full_path):
# read file
f = open(full_path,'r')
kml = f.readlines()
f.close()
# find airspaces
"""Placemark >
< name > Bremen - Blumenthal
Thermikplatte < / name >
< styleUrl > # inline10</styleUrl>
< Polygon >
< tessellate > 1 < / tessellate >
< outerBoundaryIs >
< LinearRing >
< coordinates >
8.529121049900063, 53.19549566929423, 0
8.52324583919868, 53.21131939607898, 0
8.545439298799483, 53.23055800702935, 0
8.588991466114615, 53.23047069814625, 0
8.575289966189502, 53.20745451706468, 0
8.560633120477348, 53.19724609335408, 0
8.529121049900063, 53.19549566929423, 0
< / coordinates >
< / LinearRing >
< / outerBoundaryIs >
< / Polygon >
< / Placemark >"""
container = []
idxLine = 0
did_not_pass_main_folder = True
list_of_airspace_types_included = []
while idxLine < len(kml):
#print(kml[idxLine])
#if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:
# # we have to jump over the first folder
# print(f'Reading everything inside folder: {kml[idxLine]}')
# did_not_pass_main_folder = False
        if '<Folder>' in kml[idxLine]:  # begin of an airspace-type folder
as_type = kml[idxLine+1].replace('\t','').replace('<name>','').replace('</name>\n','') # <name>B</name>
print('Reading AS-types: ' + as_type)
list_of_airspace_types_included.append(as_type)
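            # the <name> tag is expected on the line directly after <Folder>, e.g.:
            #   '\t<Folder>\n'
            #   '\t<name>B</name>\n'  ->  as_type == 'B'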
#if not (as_type == 'A' or as_type == 'B'):
# print('#### Check Folder / Airspace Types, must be "A" or "B" and try again (current %s)' % as_type)
# msgbox('Check Folder / Airspace Types, are not "A" or "B" (current %s). Airspace E will be used for export.' % as_type)
# as_type = 'E'
if '<Placemark' in kml[idxLine]: # begin of airspace
container = []
if '</Placemark' in kml[idxLine]: # end of airspace
# make sure only Polygons are stored
for as_line in container:
if '<Polygon>' in as_line:
                    idx_lookAt_start = None
                    idx_lookAt_end = None
                    for idx, line_of_container in enumerate(container):
                        if "<LookAt>" in line_of_container:
                            idx_lookAt_start = idx
                        if "</LookAt>" in line_of_container:
                            idx_lookAt_end = idx
                    # remove the <LookAt>...</LookAt> block if present; compare against None,
                    # since index 0 would be a valid (but falsy) position
                    if idx_lookAt_start is not None and idx_lookAt_end is not None:
                        container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1:]  # cut out LookAt part
                    # append this airspace to the airspace list as an Airspace instance
self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))
container.append(kml[idxLine])
idxLine += 1
print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))
# summary
outlines = ['* KML conversion file, rename this line']
json_dict = {"circles": [], "polygons": []}
for airspace in self.airspaces:
        # prepare open airspace format
outlines.append('\n\n') # separate airspaces
outlines.extend(airspace.txt_lines)
# prepare json
json_dict['polygons'].append(airspace.json_dict)
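        # json_dict now looks like {"circles": [], "polygons": [<one dict per airspace>, ...]};
        # "circles" stays empty because only KML <Polygon> geometries are extracted here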
# write open airspace format
target_path = full_path[:-4] + '_converted.txt'
    # ui save dialog (ask the user where to store the result)
target_path = filesavebox(default=target_path, filetypes="*.txt")
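    # filesavebox (easygui) returns None when the user cancels the dialog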
if target_path is None:
print('Airspace conversion was aborted by the user')
quit()
f = open(target_path,'w')
f.writelines(outlines)
f.close()
print('Result was written to: %s' % target_path)
# write json:
target_path_json = target_path[:-4] + '.json'
json_string = json.dumps(json_dict)
json_file = open(target_path_json, "w")
json_file.write(json_string)
json_file.close()
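    # note: json.dump(json_dict, json_file) would be an equivalent one-step
    # alternative to json.dumps followed by write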
    # summary: list the converted airspace types (e.g. as input for an index.html of a leaflet map)
print('The following airspace types have been converted:')
print(list_of_airspace_types_included) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def open_airspace_format_2_kml(self, source_file_txt):\n # load template for kml file\n self.load_kml_template(self.full_path_kml_template)\n # load airspace source\n self.load_airspace_open_air_format(source_file_txt)\n\n self.kml_lines = self.kml_template['header']\n self.kml_lines.extend(self.kml_template['good_subdivided']['head'])\n # collect all A and B kml lines\n kml_A = []\n kml_B = []\n # transform airspaces and attach to A and B collect-lists\n for airspace in self.airspaces:\n airspace.make_kml_format(self.kml_template)\n if airspace.as_type == 'A':\n kml_A.extend(airspace.kml_lines)\n if airspace.as_type == 'B':\n kml_B.extend(airspace.kml_lines)\n\n self.kml_lines.extend(kml_A)\n self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])\n # start B part\n self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])\n self.kml_lines.extend(kml_B)\n self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])\n\n full_path_kml = source_file_txt[:-4] + '_converted.kml'\n # uisave dialog\n full_path_kml = filesavebox(default=full_path_kml, filetypes=\"*.kml\")\n if full_path_kml is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n # write to file\n f = open(full_path_kml, 'w')\n f.writelines(self.kml_lines)\n f.close()\n print('Resulting KML files was saved to: %s' % full_path_kml)",
"def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result",
"def __init__(self, full_path_of_source=''):\n if len(full_path_of_source) == 0:\n full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=[\"*.txt\", \"*.kml\"])\n if full_path_of_source is None:\n print('Airspace conversion was aborted by the user')\n quit()\n # set template (this should not be changed)\n self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named \"good\" and \"bad\"\n\n self.airspaces = [] # airspace container\n self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template\n 'good_subdivided': {'head':[], 'placemark': [], 'tail': []},\n 'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}\n self.txt_lines = [] # airspace file in open airspace format\n self.kml_lines = [] # airspace file in kml format\n \"\"\" handle conversion from and to KML / airspace format\"\"\"\n if full_path_of_source.lower().endswith('.kml'):\n self.kml_2_open_airspace_and_json_format(full_path_of_source)\n if full_path_of_source.lower().endswith('.txt'):\n self.open_airspace_format_2_kml(full_path_of_source)\n self.plot_all() # works for now only for TXT input",
"def make_open_airspace_format(self):\n # Extract coordinates from KML\n for idxline in range(len(self.kml_lines)):\n if '<name>' in self.kml_lines[idxline]:\n self.name = self.kml_lines[idxline].replace('\\t', '').replace('<name>', '').replace('</name>', '').replace('\\n','')\n if not self.name.startswith('TS'):\n self.name = 'TS_' + self.name\n print('Type: %s | Name: %s' % (self.as_type, self.name))\n if '<coordinates>' in self.kml_lines[idxline]:\n self.coordinates_kml = self.kml_lines[idxline + 1].replace('\\t', '').replace('\\n', '')\n break\n # start conversion to airspace format\n \"\"\" AC A\n AN TS_Erzgeb\n AL FL98\n AH FL99\n DP 50:26:22 N 012:17:59 E\n DP 50:25:25 N 012:18:26 E\n DP 50:24:40 N 012:19:01 E\n DP 50:24:06 N 012:19:46 E\"\"\"\n\n # AC A\n self.txt_lines.append('AC %s\\n' % self.as_type)\n # AN TS_Erzgeb\n self.txt_lines.append('AN %s\\n' % self.name)\n # heights\n self.txt_lines.append('AL FL98\\n')\n self.txt_lines.append('AH FL99\\n')\n # coordinates\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n # Target format: DP 50:26:22 N 012:17:59 E\n lat_long = coo_pt.split(',')\n # latitude\n latDecAsStr = lat_long[1].split('.')\n #if '.' not in latDecAsStr: # take care of case \"51\" instead of \"51.123456\"\n # latDecAsStr += '.000000'\n lat_degree = abs(int(latDecAsStr[0]))\n #print(f'latDecAsStr {latDecAsStr}')\n if len(latDecAsStr)==1:\n latDecAsStr.append('0')\n lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1\n lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)\n lat_second = round(lat_secondDec*60)\n cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))\n if latDecAsStr[0].startswith('-'):\n cooString += ' S'\n else:\n cooString += ' N'\n # longitude\n #print(f'converting lat_long {lat_long}')\n # take care of case: no decimal sign included, case \"11\" instead of \"11.123456\"\n if '.' not in lat_long[0]:\n lat_long[0] += '.0'\n lonDecAsStr = lat_long[0].split('.')\n lon_degree = abs(int(lonDecAsStr[0]))\n lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1\n lon_minute = round((float('0.' + lonDecAsStr[1]) * 60) - lon_secondDec)\n lon_second = round(lon_secondDec * 60)\n cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))\n if lonDecAsStr[0].startswith('-'):\n cooString += ' W'\n else:\n cooString += ' E'\n cooString += '\\n'\n self.txt_lines.append(cooString)",
"def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)",
"def importKML(filepath):\n\tf = open(filepath, 'r')\n\tstr = f.read()\n\treturn etree.fromstring(str)",
"def make_input_data_kmls(rundata):\n \n import os\n from . import topotools, dtopotools\n\n regions2kml(rundata, combined=False)\n gauges2kml(rundata)\n\n topofiles = rundata.topo_data.topofiles\n for f in topofiles:\n topo_file_name = f[-1]\n topo_type = f[0]\n topo2kml(topo_file_name, topo_type)\n \n dtopofiles = rundata.dtopo_data.dtopofiles\n for f in dtopofiles:\n dtopo_file_name = f[-1]\n dtopo_type = f[0]\n dtopo2kml(dtopo_file_name, dtopo_type)",
"def tdump2kml(inputDir):\n # Check inputdir\n if not os.path.exists(inputDir):\n print(\"Entered directory is invalid.\")\n sys.exit()\n\n os.chdir(inputDir)\n\n # Main loop\n for run in os.walk('.').next()[1]:\n\n os.chdir(run)\n\n # Filter tdump files\n files = glob.glob(\"*.tdump\")\n\n # Conversion\n for entry in files:\n p = subprocess.Popen(\"C:\\\\hysplit4\\\\exec\\\\trajplot.exe -i%s -o%s.ps -a3 -v1 -l1\" % \\\n (entry, entry), shell=True, stdout=subprocess.PIPE)\n p.wait()\n os.remove(entry[:-6])\n #p_out = p.communicate()\n #print p_out[0], p_out[1]\n\n # Move all kmls into dir kmls\n #sys.stdout.flush()\n kmls = glob.glob(\"*.kml\")\n\n if not os.path.exists(\"kmls\"):\n os.makedirs(\"kmls\")\n\n for kml in kmls:\n os.rename(kml, \"kmls\\\\%s\" % kml)\n\n # Remove redundant ps files\n pss = glob.glob(\"*.ps\")\n\n for ps in pss:\n os.remove(ps)\n\n print \"DONE : %s %s\\kmls\" % (run, os.getcwd())\n os.chdir('../')",
"def makepkl():\n # Old osgeo.ogr approach\n from osgeo import ogr\n # USTimeZones.kml source is unknown, but was freely available and\n # Has been converted to a pkl file\n kmlpath = os.path.join(os.path.dirname(__file__), 'USTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(uspklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(uspklpath, 'w'))\n\n # WorldTimeZones.kml source is below and was freely available and\n # Has been converted to a pkl file\n # https://productforums.google.com/forum/?fromgroups=#!msg/gec-tools/EdR18tz_5k8/MRPV85OxXIkJ\n kmlpath = os.path.join(os.path.dirname(__file__), 'WorldTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(worldpklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(worldpklpath, 'w'))",
"def get_kml_object(filename: str) -> fastkml.kml.KML:\n\t\n\tkml_obj = fastkml.kml.KML()\n\t\n\twith open(filename) as file:\n\t\tkml_obj.from_string(file.read().encode(\"utf-8\"))\n\t\n\treturn kml_obj",
"def read_kml():\n global kmldata\n global CONFIG\n if type(kmldata) == type(None):\n if not os.path.exists(CONFIG[\"kmlfile\"]):\n fiona.drvsupport.supported_drivers['KML'] = 'rw'\n kmldata = geopandas.read_file(CONFIG[\"kmlrepo\"], driver=\"KML\")\n os.makedirs(CONFIG[\"cachedir\"],exist_ok=True)\n with open(CONFIG[\"kmlfile\"], \"wb\") as fh:\n pickle.dump(kmldata,fh)\n else:\n with open(CONFIG[\"kmlfile\"], \"rb\") as fh:\n kmldata = pickle.load(fh)\n return kmldata",
"def export_kml(self, kmz=False):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n if kmz:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kmz\",\n filetypes=((\"keyhole markup language\", \"*.kmz\"),\n (\"All Files\", \"*.*\")))\n else:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kml\",\n filetypes=((\"keyhole markup language\", \"*.kml\"),\n (\"All Files\", \"*.*\")))\n if outputfile:\n self.tabs.window.aistracker.create_kml_map(\n outputfile, kmzoutput=kmz, orderby=orderby,\n region=currentregion)\n else:\n raise ExportAborted('Export cancelled by user.')",
"def kml(cls, user, logs, kml, kml_doc):\n # KML Compliant Datetime Formatter\n kml_datetime_format = \"%Y-%m-%dT%H:%M:%S.%fZ\"\n icon = 'http://maps.google.com/mapfiles/kml/shapes/airports.png'\n threshold = 1 # Degrees\n\n kml_folder = kml.newfolder(name=user.username)\n\n flights = TakeoffOrLandingEvent.flights(user)\n if len(flights) == 0:\n return\n\n logs = filter(lambda log: cls._is_bad_position(log, threshold), logs)\n for i, flight in enumerate(flights):\n label = 'Flight {}'.format(i + 1) # Flights are one-indexed\n kml_flight = kml_folder.newfolder(name=label)\n\n flight_logs = filter(lambda x: flight.within(x.timestamp), logs)\n if len(flight_logs) < 2:\n continue\n\n coords = []\n angles = []\n when = []\n for entry in flight_logs:\n pos = entry.uas_position.gps_position\n # Spatial Coordinates\n coord = (pos.longitude, pos.latitude,\n units.feet_to_meters(entry.uas_position.altitude_msl))\n coords.append(coord)\n\n # Time Elements\n time = entry.timestamp.strftime(kml_datetime_format)\n when.append(time)\n\n # Degrees heading, tilt, and roll\n angle = (entry.uas_heading, 0.0, 0.0)\n angles.append(angle)\n\n # Create a new track in the folder\n trk = kml_flight.newgxtrack(name='Flight Path')\n trk.altitudemode = AltitudeMode.absolute\n\n # Append flight data\n trk.newwhen(when)\n trk.newgxcoord(coords)\n trk.newgxangle(angles)\n\n # Set styling\n trk.extrude = 1 # Extend path to ground\n trk.style.linestyle.width = 2\n trk.style.linestyle.color = Color.blue\n trk.iconstyle.icon.href = icon\n\n for obstacle in MovingObstacle.objects.all():\n obstacle.kml(path=flight_logs, kml=kml_flight, kml_doc=kml_doc)",
"def kml_file_to_open511_element(filename):\n ds = DataSource(filename)\n base_element = get_base_open511_element(lang='fr')\n for layer in ds:\n for feature in layer:\n base_element.append(feature_to_open511_element(feature))\n return base_element",
"def convert(input_filename, output_filename):\n c_file = pkg_resources.resource_filename('ShapelyChipDesigns', 'convert.rb')\n os.system('klayout -z -rd input='+input_filename+' -rd output='+output_filename+' -r '+c_file)",
"def process(kml_file, kmz=False):\n\ttry:\n\t\tif kmz:\n\t\t\tzipped = zipfile.ZipFile(kml_file)\n\t\t\tkml = Kml(zipped.open('doc.kml'))\n\t\telse: \n\t\t\tkml = Kml(open(kml_file))\n\texcept Exception as e:\n\t\tprint('Failed for %s: %s' % (kml_file, e))\n\telse:\n\t\tprint('FILE NAME: %s' % kml_file)\n\t\tif not is_duplicate(kml.as_dict(), collection): \n\t\t\t# try to update database AND\n\t\t\t# extract files to right place; if one\n\t\t\t# fails, undo the other:\t\n\t\t\ttry:\n\t\t\t\tcollection.insert_one(kml.as_dict())\n\t\t\texcept Exception as e:\n\t\t\t\tprint('Failed to update database with %s: %s' % (kml, e))\n\t\t\telse:\n\t\t\t\ttry:\n\t\t\t\t\tdest = 'static/kml/%s' % kml.uid\n\t\t\t\t\tif kmz:\n\t\t\t\t\t\tzipped.extractall(dest)\n\t\t\t\t\telse:\n\t\t\t\t\t\tif not os.path.exists(os.path.dirname(dest)): os.makedirs(os.path.dirname(dest))\n\t\t\t\t\t\tshutil.copy(kml_file, '%s/doc.kml' % dest)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint('Failed to extract files: %s\\n\\tTrying to remove record from database...' % e)\n\t\t\t\t\ttry:\n\t\t\t\t\t\tcollection.remove(kml.as_json())\n\t\t\t\t\texcept Exception as e:\n\t\t\t\t\t\tprint('Failed to remove item from database -- db is no longer consistent w/ file system: %s' % e)\n\tfinally:\n\t\tif kmz:\n\t\t\tzipped.close()\n\t\telse:\n\t\t\tkml.close()",
"def kml_extract_RDD(xml_file):\n soup = BeautifulSoup(xml_file, \"lxml-xml\")\n return get_kml_content(soup)",
"def show_kml_list():\n out = []\n\n for filename in os.listdir(settings.KML_OUTPUT_DIR):\n path = os.path.join(settings.KML_OUTPUT_DIR, filename)\n if os.path.isdir(path):\n continue\n f = open(path)\n content = f.read(300)\n f.close()\n name = KML_NAME_RE.search(content)\n if not name:\n continue\n out.append((name.group(1), filename))\n\n return {'items': sorted(out, cmp=lambda a, b: dumb_czech_cmp(a, b)), 'MEDIA_URL': settings.MEDIA_URL}",
"def readKML(filename):\n\n kml_file = path.join(filename)\n\n #### se leen los elementos del KML\n with open(kml_file) as f:\n folder = parser.parse(f).getroot().Document.Folder\n\n #### se separan los elementos, nombres de los puntos y las coordenadas\n plnm=[]\n cordi=[]\n for pm in folder.Placemark:\n plnm1 = pm.name\n plcs1 = pm.Point.coordinates\n plnm.append(plnm1.text)\n cordi.append(plcs1.text)\n # print(cordi)\n # print(plnm) \n\n #### se genera el objeto pandas\n db=pd.DataFrame()\n db['point_name']=plnm\n db['cordinates']=cordi\n\n db['Longitude'], db['Latitude'], db['value'] = zip(*db['cordinates'].apply(lambda x: x.split(',', 2)))\n db[\"Longitude\"] = pd.to_numeric(db[\"Longitude\"])\n db[\"Latitude\"] = pd.to_numeric(db[\"Latitude\"])\n del db['cordinates']\n del db['value']\n\n db['Coordinates'] = list(zip(db.Longitude, db.Latitude))\n db['Coordinates'] = db['Coordinates'].apply(Point)\n\n # print(db)\n\n return db",
"def read(self, url: str):\n\n log.info(f\"Downloading KMZ file {basename(url)}\")\n kml = self.fetch(url)\n\n log.info(\"Parsing KML data\")\n self.iter_elems = iterparse(BytesIO(kml), events=(\"start\", \"end\"), resolve_entities=False)\n\n prod_items = {\n \"issuer\": \"Issuer\",\n \"product_id\": \"ProductID\",\n \"generating_process\": \"GeneratingProcess\",\n \"issue_time\": \"IssueTime\",\n }\n\n nsmap = None\n\n # Get Basic Metadata\n prod_definition = None\n prod_definition_tag = None\n for event, element in self.iter_elems:\n if event == \"start\":\n # get namespaces from root element\n if nsmap is None:\n nsmap = element.nsmap\n prod_definition_tag = f\"{{{nsmap['dwd']}}}ProductDefinition\"\n elif event == \"end\":\n if element.tag == prod_definition_tag:\n prod_definition = element\n # stop processing after head\n # leave forecast data for iteration\n break\n\n self.metadata = {k: prod_definition.find(f\"{{{nsmap['dwd']}}}{v}\").text for k, v in prod_items.items()}\n self.metadata[\"issue_time\"] = dt.datetime.fromisoformat(self.metadata[\"issue_time\"])\n\n # Get time steps.\n timesteps = prod_definition.findall(\n \"dwd:ForecastTimeSteps\",\n nsmap,\n )[0]\n self.timesteps = [dt.datetime.fromisoformat(i.text) for i in timesteps.getchildren()]\n\n # save namespace map for later iteration\n self.nsmap = nsmap",
"def load_asterix_category_format(k):\n global filenames\n try:\n __basePath__ = os.path.abspath(os.path.join(os.getcwd(), '../../../..'))\n\n # Look for file in current executing directory\n path_filename1 = filenames[k]\n\n # On default directory (absolute)\n path_filename2 = __basePath__ + \"/\" +filenames[k]\n\n # On default directory (relative)\n path_filename3 = os.path.dirname(os.path.realpath(__file__)) + \"/xml/\" + filenames[k]\n\n if os.path.isfile(path_filename1):\n # print \"Loading file '%s'\" % path_filename1\n return minidom.parse(path_filename1)\n\n if os.path.isfile(path_filename2):\n # print \"Loading file '%s'\" % path_filename2\n return minidom.parse(path_filename2)\n\n if os.path.isfile(path_filename3):\n # print \"Loading file '%s'\" % path_filename3\n return minidom.parse(path_filename3)\n\n return None\n\n except:\n traceback.print_exc()\n\n return None",
"def main():\n input_file_path = sys.argv[1]\n output_file_path = sys.argv[2]\n gps_df = create_df(input_file_path) # creates a data frame\n gps_df = clean_data(gps_df) # cleans the data\n print('Cleaning done')\n write_to_kml(gps_df, output_file_path) # writes to kml file",
"def funcion_escribe_kml():\n\n DB = \"geoinfo\" # default database name\n LOGIN = \"gast\" # default login\n PASSWORD = \"gast\" # default password\n\n cnx = MySQLdb.connect(db=DB, user=LOGIN, passwd=PASSWORD)\n cursor = cnx.cursor()\n\n cursor.execute(\"SELECT * from wlan order by essid\")\n results = cursor.fetchall()\n\n print \"Total APs: %s\" % len(results) # print total AP count\n\n f = open(sys.argv[1], 'w')\n f.write('<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n')\n f.write('<kml xmlns=\"http://earth.google.com/kml/2.2\">\\n')\n f.write(' <Folder>\\n')\n f.write(' <name>GpsDrive+Kismet wifis</name>\\n')\n # By default folder is showed\n f.write(' <visibility>1</visibility>\\n')\n # GpsDrive icon\n f.write(' <ScreenOverlay>\\n')\n f.write(' <name>Info</name>\\n')\n f.write(' <description>Wifi data</description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n f.write(' <Icon>\\n')\n f.write(' <href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/gpsdrivelogo.png</href>\\n')\n f.write(' </Icon>\\n')\n f.write(' <overlayXY x=\"0\" y=\"-1\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <screenXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <rotationXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <size x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' </ScreenOverlay>')\n\n # write all APs to .kml file\n for line in results:\n name = line[6].replace('&', 'and') # To avoid Google Earth errors\n wep = line[8]\n lat = line[1]\n lon = line[2]\n mac = line[5]\n\n f.write('\\n')\n f.write(' <Placemark>\\n')\n f.write(' <name>%s</name>\\n' % name)\n f.write(' <description>')\n f.write(' <![CDATA[ <table width=\"300\"><tr><td>')\n f.write(' - EESID: %s\\n <br />' % name)\n f.write(' - BBSID: %s\\n <br />' % mac)\n tipo_ap = funcion_tipo_ap(wep)\n f.write(' - Security: %s\\n <br />' % tipo_ap)\n f.write(' - GPS coords.: %s, %s\\n <br />' % (lon, lat))\n f.write(' </td></tr></table> ]]>')\n f.write(' </description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n\n tipo_ap = funcion_tipo_ap(wep) # get AP type\n\n # Draw AP icon\n f.write('<Style>')\n f.write('<IconStyle>')\n f.write(' <Icon><href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/%s.png</href></Icon>\\n' % tipo_ap)\n f.write('</IconStyle>')\n f.write('</Style>')\n f.write(' <Point><coordinates>%s,%s,45</coordinates></Point>\\n' % (lon, lat))\n f.write(' </Placemark>\\n')\n\n f.write(' </Folder>\\n')\n f.write('</kml>')",
"def line2kml(xy,fname='line.kml',name='line',color='00FFFF',width=3,\n verbose=True):\n \n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Line: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_line(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def dtopo2kml(dtopo_file_name, dtopo_type, color='8888FF'):\n\n import os\n from clawpack.geoclaw import dtopotools\n dtopo = dtopotools.DTopography()\n dtopo.read(dtopo_file_name, dtopo_type)\n x1 = dtopo.x.min()\n x2 = dtopo.x.max()\n y1 = dtopo.y.min()\n y2 = dtopo.y.max()\n xy = (x1,x2,y1,y2)\n name = os.path.splitext(os.path.split(dtopo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def kmlWriter(output_data, output_dir, output_name):\n msg = 'Writing ' + output_name + ' KML output.'\n print '[+]', msg\n logging.info(msg)\n # Instantiate a Kml object and pass along the output filename\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if 'Latitude' in exif.keys() and 'Latitude Reference' in exif.keys() and 'Longitude Reference' in exif.keys() and 'Longitude' in exif.keys():\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'], description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output_dir, output_name))",
"def export_kmz(self):\n self.export_kml(kmz=True)",
"def write_kml_object(kml_object: fastkml.kml.KML, filename: str) -> None:\n\t\n\twith open(filename, \"w+\") as file:\n\t\tfile.write(kml_object.to_string())",
"def test_convert_csv_to_kml(self):\n import tempfile\n from pykml.util import convert_csv_to_kml\n\n # create a CSV file for testing\n csvfile = tempfile.TemporaryFile(mode='w+')\n csvfile.write('name,snippet,lat,lon\\n')\n csvfile.write('first,The first one,45.0,-90.0\\n')\n csvfile.write('second,The second one,46.0,-89.0\\n')\n csvfile.write('third,\"The third one (with quotes)\",45.0,-88.0\\n')\n csvfile.seek(0)\n\n kmlobj = convert_csv_to_kml(csvfile)\n csvfile.close()\n\n target = etree.fromstring(\n '<kml '\n 'xmlns:atom=\"http://www.w3.org/2005/Atom\" '\n 'xmlns:gx=\"http://www.google.com/kml/ext/2.2\" '\n 'xmlns=\"http://www.opengis.net/kml/2.2\">'\n '<Document>'\n '<Folder>'\n '<name>KmlFile</name>'\n '<Placemark>'\n '<name>first</name>'\n '<Snippet maxLines=\"2\">The first one</Snippet>'\n '<description>'\n '<![CDATA['\n '<table border=\"1\"'\n '<tr><th>name</th><td>first</td></tr>'\n '<tr><th>snippet</th><td>The first one</td></tr>'\n '<tr><th>lat</th><td>45.0</td></tr>'\n '<tr><th>lon</th><td>-90.0</td></tr>'\n '</table>'\n ']]>'\n '</description>'\n '<Point>'\n '<coordinates>-90.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>second</name>'\n '<Snippet maxLines=\"2\">The second one</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>second</td></tr><tr><th>snippet</th><td>The second one</td></tr><tr><th>lat</th><td>46.0</td></tr><tr><th>lon</th><td>-89.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-89.0,46.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>third</name>'\n '<Snippet maxLines=\"2\">The third one (with quotes)</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>third</td></tr><tr><th>snippet</th><td>The third one (with quotes)</td></tr><tr><th>lat</th><td>45.0</td></tr><tr><th>lon</th><td>-88.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-88.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '</Folder>'\n '</Document>'\n '</kml>'\n )\n self.assertTrue(compare_xml(target, kmlobj))",
"def saveKML(kmlFile):\n\n tilePath = os.path.basename('map-NYC_heatmap.png')\n north = topLeftLat\n south = bottomRightLat\n east = topLeftLon\n west = bottomRightLon\n \n bytes = KML % (tilePath, north, south, east, west)\n file(kmlFile, \"w\").write(bytes)",
"def image2kml(self,varname,filename=None):\n\n vdata=self.get_array(varname)\n im=self.get_image(vdata)\n if filename is None:\n filename='%s.png' % varname\n f=open(filename,'w')\n f.write(im)\n f.close()\n d=self.get_kml_dict(varname,filename)\n pylab.close('all')\n return self.__class__.kmlimage % d",
"def read(filepath):\n # Core Library modules\n import xml.etree.ElementTree\n\n root = xml.etree.ElementTree.parse(filepath).getroot()\n\n # Get the raw data\n recording = []\n strokes = sorted(\n root.findall(\"{http://www.w3.org/2003/InkML}trace\"),\n key=lambda child: int(child.attrib[\"id\"]),\n )\n time = 0\n for stroke in strokes:\n stroke = stroke.text.strip().split(\",\")\n stroke = [point.strip().split(\" \") for point in stroke]\n if len(stroke[0]) == 3:\n stroke = [\n {\"x\": float(x), \"y\": float(y), \"time\": float(t)} for x, y, t in stroke\n ]\n else:\n stroke = [{\"x\": float(x), \"y\": float(y)} for x, y in stroke]\n new_stroke = []\n for p in stroke:\n new_stroke.append({\"x\": p[\"x\"], \"y\": p[\"y\"], \"time\": time})\n time += 20\n stroke = new_stroke\n time += 200\n recording.append(stroke)\n\n # Get LaTeX\n formula_in_latex = None\n annotations = root.findall(\"{http://www.w3.org/2003/InkML}annotation\")\n for annotation in annotations:\n if annotation.attrib[\"type\"] == \"truth\":\n formula_in_latex = annotation.text\n hw = handwritten_data.HandwrittenData(\n json.dumps(recording), formula_in_latex=formula_in_latex\n )\n for annotation in annotations:\n if annotation.attrib[\"type\"] == \"writer\":\n hw.writer = annotation.text\n elif annotation.attrib[\"type\"] == \"category\":\n hw.category = annotation.text\n elif annotation.attrib[\"type\"] == \"expression\":\n hw.expression = annotation.text\n\n # Get segmentation\n segmentation = []\n trace_groups = root.findall(\"{http://www.w3.org/2003/InkML}traceGroup\")\n if len(trace_groups) != 1:\n raise Exception(\n \"Malformed InkML\",\n (\n \"Exactly 1 top level traceGroup expected, found %i. \"\n \"(%s) - probably no ground truth?\"\n )\n % (len(trace_groups), filepath),\n )\n trace_group = trace_groups[0]\n symbol_stream = [] # has to be consistent with segmentation\n for tg in trace_group.findall(\"{http://www.w3.org/2003/InkML}traceGroup\"):\n annotations = tg.findall(\"{http://www.w3.org/2003/InkML}annotation\")\n if len(annotations) != 1:\n raise ValueError(\n \"%i annotations found for '%s'.\" % (len(annotations), filepath)\n )\n db_id = formula_to_dbid(normalize_symbol_name(annotations[0].text))\n symbol_stream.append(db_id)\n trace_views = tg.findall(\"{http://www.w3.org/2003/InkML}traceView\")\n symbol = []\n for traceView in trace_views:\n symbol.append(int(traceView.attrib[\"traceDataRef\"]))\n segmentation.append(symbol)\n hw.symbol_stream = symbol_stream\n hw.segmentation = segmentation\n _flat_seg = [stroke2 for symbol2 in segmentation for stroke2 in symbol2]\n assert len(_flat_seg) == len(\n recording\n ), \"Segmentation had length %i, but recording has %i strokes (%s)\" % (\n len(_flat_seg),\n len(recording),\n filepath,\n )\n assert set(_flat_seg) == set(range(len(_flat_seg))), (\n f\"set(_flat_seg) = {set(_flat_seg)} !=\"\n f\"{set(range(len(_flat_seg)))} = set(range(len(_flat_seg)))\"\n )\n hw.inkml = beautify_xml(filepath)\n hw.filepath = filepath\n return hw",
"def write(self,vname,kmz='out.kmz'):\n\n imgs=[] # to store a list of all images created\n content=[] # the content of the main kml\n vstr='files/%s_%05i.png' # format specification for images (all stored in `files/' subdirectory)\n\n # create empty files subdirectory for output images\n try:\n shutil.rmtree('files')\n except:\n pass\n os.makedirs('files')\n\n # loop through all time slices and create the image data\n # appending to the kml content string for each image\n for i in xrange(0,self.nstep,1):\n kml=ncNWRC(self.filename,istep=i)\n img=vstr % (vname,i)\n imgs.append(img)\n content.append(kml.image2kml(vname,img))\n\n # create the main kml file\n kml=ncNWRC.kmlstr % \\\n {'content':'\\n'.join(content),\\\n 'prog':ncNWRC.progname}\n\n # create a zipfile to store all images + kml into a single compressed file\n z=zipfile.ZipFile(kmz,'w',compression=zipfile.ZIP_DEFLATED)\n z.writestr(kmz[:-3]+'kml',kml)\n for img in imgs:\n z.write(img)\n z.close()",
"def copy_kml(results_dir):\n if not os.path.exists(results_dir):\n os.makedirs(results_dir)\n\n copy2(\n os.path.join(os.path.dirname(__file__), '..', 'raw', 'KML_Samples.kml'),\n results_dir\n )",
"def main():\n #short GPS Test\n filename = 'KML_short_test.kml'\n gps_filename = 'gps_short_test.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()\n\n #Repeat test\n filename = 'KML_repeat_test1.kml'\n gps_filename = 'gps_1.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()\n\n filename = 'KML_repeat_test2.kml'\n gps_filename = 'gps_1.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()",
"def import_cml(self, fname):\n self.ftype = 'cml'\n with open(fname) as f:\n lines = f.readlines()\n self.n_atom = 0\n self.n_connect = 0\n self.sym = []\n self.at_num = []\n self.xyz = []\n self.connect = []\n for i in range(len(lines)):\n if lines[i].split()[0] == '<atom':\n self.n_atom += 1\n tmp = lines[i].split()\n self.sym.append(tmp[2].split('\"')[1])\n self.at_num.append(self.sym2num(tmp[2].split('\"')[1]))\n x = float(tmp[3].split('\"')[1])\n y = float(tmp[4].split('\"')[1])\n z = float(tmp[5].split('\"')[1])\n self.xyz.append([x, y, z])\n elif lines[i].split()[0] == '<bond':\n self.n_connect += 1\n tmp = lines[i].split()\n a = int(tmp[1].split('\"')[1].split('a')[1])\n b = int(tmp[2].split('\"')[0].split('a')[1])\n self.connect.append([a, b])\n self.xyz = np.array(self.xyz)",
"def __init__(self, kml):\n\t\tself.uid = uuid.uuid4()\n\t\tself.tree = ET.parse(kml)\n\t\tself.root = self.tree.getroot()\n\t\t# don't catch error on this -- we want it to fail if no time:\n\t\tself._get_time()\n\t\ttry:\n\t\t\tself.name = self.root.find('.//{http://www.opengis.net/kml/2.2}name').text\n\t\texcept Exception as e:\n\t\t\t#print('self.name failed: %s' % e)\n\t\t\tself.name = 'Unnamed'\n\t\ttry:\n\t\t\tself.activity = self.root.find('.//{http://www.opengis.net/kml/2.2}Data//{http://www.opengis.net/kml/2.2}value').text.title()\n\t\texcept Exception as e:\n\t\t\t#print('self.activity failed: %s' % e)\n\t\t\tself.activity = 'Unknown'\n\t\ttry:\n\t\t\t# just a big string of text, but there is lots\n\t\t\t# to pull out of it, eg activity type:\n\t\t\tself.description = self.root.find('.//{http://www.opengis.net/kml/2.2}description').text\n\t\texcept Exception as e:\n\t\t\tprint('self.description failed: %s' % e)",
"def write_kml(self,varnames):\n if type(varnames) is str:\n varnames=(varnames,)\n content=[]\n for varname in varnames:\n content.append(self.image2kml(varname))\n kml=self.__class__.kmlstr % \\\n {'content':'\\n'.join(content),\\\n 'prog':self.__class__.progname}\n f=open(self.__class__.kmlname,'w')\n f.write(kml)\n f.close()",
"def topo2kml(topo_file_name, topo_type, color='00FF00'):\n\n import os\n from clawpack.geoclaw import topotools\n topo = topotools.Topography(topo_file_name, topo_type=topo_type)\n topo.read_header()\n xy = topo.extent\n name = os.path.splitext(os.path.split(topo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def regions2kml(rundata=None,fname='regions.kml',verbose=True,combined=True):\n\n from numpy import cos,pi,floor\n\n if rundata is None:\n try:\n import setrun\n reload(setrun)\n rundata = setrun.setrun()\n except:\n raise IOError(\"*** cannot execute setrun file\")\n\n clawdata = rundata.clawdata\n x1,y1 = clawdata.lower[0:]\n x2,y2 = clawdata.upper[0:]\n description = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\\n\" % (f2s(y1),f2s(y2))\n\n mx,my = clawdata.num_cells[0:]\n dx = (x2-x1)/float(mx)\n dx_meters = dx*111e3*cos(pi*0.5*(y1+y2)/180.)\n dy = (y2-y1)/float(my)\n dy_meters = dy*111e3\n if verbose:\n print(\"Domain: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n dx_deg,dx_min,dx_sec = deg2dms(dx)\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n #print \"Level 1 resolution: dx = %g deg, %g min, %g sec = %g meters\" \\\n # % (dx_deg,dx_min,dx_sec,dx_meters)\n levtext = \"Level 1 resolution: dy = %g deg, %g min, %g sec = %g meters\\n\" \\\n % (dy_deg,dy_min,dy_sec,dy_meters)\n if verbose:\n print(levtext)\n description = description + levtext\n\n amr_levels_max = rundata.amrdata.amr_levels_max\n refinement_ratios_y = rundata.amrdata.refinement_ratios_y\n num_ref_ratios = len(refinement_ratios_y)\n if amr_levels_max > num_ref_ratios+1:\n raise IOError(\"*** Too few refinement ratios specified for \" \\\n + \"amr_levels_max = %i\" % amr_levels_max)\n dy_levels = (num_ref_ratios+1) * [dy]\n for k,r in enumerate(refinement_ratios_y):\n level = k+2\n dy = dy_levels[k] / r\n dy_levels[k+1] = dy\n dy_meters = dy*111e3\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n levtext = \"Level %s resolution: dy = %g deg, %g min, %g sec = %g meters (refined by %i)\\n\" \\\n % (level,dy_deg,dy_min,dy_sec,dy_meters,r)\n if verbose:\n print(levtext)\n description = description + levtext\n\n if verbose:\n print(\"Allowing maximum of %i levels\" % amr_levels_max)\n\n elev = 0.\n if not combined:\n fname = 'Domain.kml'\n\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = 'Computational Domain'\n mapping['desc'] = description\n mapping['color'] = \"0000FF\" # red\n mapping['width'] = 2\n\n region_text = kml_region(mapping)\n kml_text = kml_text + region_text\n\n if not combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)\n\n \n\n regions = rundata.regiondata.regions\n if len(regions)==0 and verbose:\n print(\"No regions found in setrun.py\")\n\n\n for rnum,region in enumerate(regions):\n if not combined:\n fname = 'Region_%s.kml' % str(rnum).zfill(2)\n kml_text = kml_header(fname)\n\n minlevel,maxlevel = region[0:2]\n t1,t2 = region[2:4]\n x1,x2,y1,y2 = region[4:]\n\n if verbose:\n print(\"Region %i: %10.6f %10.6f %10.6f %10.6f\" \\\n % (rnum,x1,x2,y1,y2))\n print(\" minlevel = %i, maxlevel = %i\" \\\n % (minlevel,maxlevel) \\\n + \" t1 = %s, t2 = %s\" % (f2s(t1),f2s(t2)))\n mapping = {}\n mapping['minlevel'] = minlevel\n mapping['maxlevel'] = maxlevel\n mapping['t1'] = t1\n mapping['t2'] = t2\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = 'Region %i' % rnum\n description = \"minlevel = %i, maxlevel = %i\\n\" % (minlevel,maxlevel) \\\n + \" t1 = %s, t2 = %s\\n\" % (f2s(t1),f2s(t2)) \\\n + \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\\n\\n\" % 
(f2s(y1),f2s(y2))\n if len(dy_levels) >= minlevel:\n dy = dy_levels[minlevel-1]\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n dy_meters = dy*111e3\n levtext = \"Level %s resolution: \\ndy = %g deg, %g min, %g sec \\n= %g meters\\n\" \\\n % (minlevel,dy_deg,dy_min,dy_sec,dy_meters)\n description = description + levtext\n if (maxlevel > minlevel) and (len(dy_levels) >= maxlevel):\n dy = dy_levels[maxlevel-1]\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n dy_meters = dy*111e3\n levtext = \"\\nLevel %s resolution: \\ndy = %g deg, %g min, %g sec \\n= %g meters\\n\" \\\n % (maxlevel,dy_deg,dy_min,dy_sec,dy_meters)\n description = description + levtext\n mapping['desc'] = description\n mapping['color'] = \"FFFFFF\" # white\n mapping['width'] = 3\n\n region_text = kml_region(mapping)\n kml_text = kml_text + region_text\n if not combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)\n\n if combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def k2g(\n kml_path_or_buffer,\n output_dir,\n feature_collection_name,\n style_type,\n style_filename,\n separate_folders,\n):\n style, *layers = m.convert(\n kml_path_or_buffer,\n style_type=style_type,\n separate_folders=separate_folders,\n feature_collection_name=feature_collection_name,\n )\n\n # Create output directory if it doesn't exist\n output_dir = pl.Path(output_dir)\n if not output_dir.exists():\n output_dir.mkdir(parents=True)\n output_dir = output_dir.resolve()\n\n # Write style file\n path = output_dir / style_filename\n with path.open(\"w\") as tgt:\n json.dump(style, tgt)\n\n # Create filenames for layers\n stems = m.disambiguate(m.to_filename(layer[\"name\"]) for layer in layers)\n filenames = [f\"{stem}.geojson\" for stem in stems]\n\n # Write layer files\n for i in range(len(layers)):\n path = output_dir / filenames[i]\n with path.open(\"w\") as tgt:\n json.dump(layers[i], tgt)",
"def main(input):\n path = os.path.abspath(input)\n name = os.path.splitext(os.path.basename(path))[0]\n p = os.path.join(os.getcwd(),name)\n i = 1\n p1 = p\n while os.path.exists(p1):\n p1 = \"{p}-{i}\".format(p=p,i=i)\n i += 1\n p = p1\n os.mkdir(p1)\n os.mkdir(os.path.join(p1,\"media\"))\n with zipfile.ZipFile(path) as zf:\n for file in zf.namelist():\n # Path traversal defense copied from\n # http://hg.python.org/cpython/file/tip/Lib/http/server.py#l789\n words = file.split('/')\n dest = os.path.join(p1, \"media\")\n if words[0] == \"word\" and words[1] == \"media\":\n for word in words[2:]:\n while True:\n drive, word = os.path.splitdrive(word)\n head, word = os.path.split(word)\n if not drive:\n break\n if word in (os.curdir, os.pardir, ''):\n continue\n dest = os.path.join(dest, word)\n click.echo(\"{} -> {}\".format(file, dest))\n of = open(dest, 'wb')\n of.write(zf.read(file))\n of.close()\n\n newdoc = os.path.join(p1, os.path.basename(path))\n lyxfile = os.path.join(p1, name + \".lyx\")\n texfile = os.path.join(p1, name + \".tex\")\n shutil.copyfile(path, newdoc)\n os.system(\"pandoc -s -f docx -t latex -o '{of}' '{i}'\".format(of=texfile, i=newdoc))\n os.system(\"tex2lyx '{i}' '{o}'\".format(i=texfile, o=lyxfile))\n os.remove(texfile)\n os.system(\"convertwmf {dir}\".format(dir=os.path.join(p1, \"media\")))\n click.echo(lyxfile)",
"def json_to_lkg(filename):\n\n nx_graph = json_to_nx(filename)\n lkg = nx_to_lkg(nx_graph)\n return(lkg)",
"def gauges2kml(rundata=None, fname='gauges.kml', verbose=True):\n\n\n if rundata is None:\n try:\n import setrun\n reload(setrun)\n rundata = setrun.setrun()\n except:\n raise IOError(\"*** cannot execute setrun file\")\n\n elev = 0.\n kml_text = kml_header(fname)\n\n\n gauges = rundata.gaugedata.gauges\n if len(gauges)==0 and verbose:\n print(\"No gauges found in setrun.py\")\n\n\n for rnum,gauge in enumerate(gauges):\n t1,t2 = gauge[3:5]\n x1,y1 = gauge[1:3]\n gaugeno = gauge[0]\n if verbose:\n print(\"Gauge %i: %s, %s \\n\" % (gaugeno,f2s(x1),f2s(y1)) \\\n + \" t1 = %s, t2 = %s\" % (f2s(t1),f2s(t2)))\n mapping = {}\n mapping['gaugeno'] = gaugeno\n mapping['t1'] = t1\n mapping['t2'] = t2\n mapping['x1'] = x1\n mapping['y1'] = y1\n mapping['elev'] = elev\n mapping['name'] = 'Gauge %i' % rnum\n description = \" t1 = %s, t2 = %s\\n\" % (f2s(t1),f2s(t2)) \\\n + \" x1 = %s, y1 = %s\\n\" % (f2s(x1),f2s(y1))\n mapping['desc'] = description\n\n gauge_text = kml_gauge(mapping)\n kml_text = kml_text + gauge_text\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def open_input_files(self):\n self.dictionaryFile = open(self.dictionaryFile, 'r', encoding=self.encoding)\n\n if self.annotationFile :\n self.annotationFile = open(self.annotationFile, 'r', encoding=self.encoding)\n elif self.annotationFile is None:\n try:\n self.annotationFile = open(os.path.join(self.dictionaryPath, self.dictionaryName + '.ann'), 'r', encoding=self.encoding)\n except FileNotFoundError:\n if self.verbose >= 2:\n sys.stdout.write (\"Warning: annotation file is not found.\\n\")\n\n if self.abbreviationsFile :\n self.abbreviationsFile = open(self.abbreviationsFile, 'r', encoding=self.encoding)\n elif self.abbreviationsFile is None:\n try:\n self.abbreviationsFile = open(os.path.join(self.dictionaryPath, self.dictionaryName + '_abrv.dsl'), 'r', encoding=self.encoding)\n except FileNotFoundError:\n if self.verbose >= 2:\n sys.stdout.write (\"Warning: abbreviations file is not found.\\n\")",
"def read_model(modelfile, dictlist):\n global dxdict\n global dxlist\n global import_img\n dxdict, dxlist = {}, [] # the list is needed for fixed ordering\n mod = io.open(modelfile, 'r')\n st = next(mod)\n ### image adress is found\n while 'SCHEME_IMAGE' not in st:\n st = next(mod)\n #image_adress = st.strip().split()[-1]\n #import_img = ImageTk.PhotoImage(Image.open(image_adress).resize((496, 384), Image.ANTIALIAS))\n #scheme.configure(image = import_img)\n ### the file must contain equations for ODE between ***STATES*** and ***END*** statements\n while \"***STATES***\" not in st:\n st = next(mod)\n #\n while \"***END***\" not in st:\n st = next(mod)\n try:\n dxdict[st.split('=')[0].strip()] = st.split('=')[1].strip().strip(';')\n dxlist.append(st.split('=')[0].strip())\n except:\n continue\n ## now, add dict names to the equations\n ## also, add state names to the PREDEFINED dict\n for s in dxdict.keys():\n for d in dictlist:\n keys = d + '.keys()'\n for k in eval(keys):\n dxdict[s] = dxdict[s].replace(k, \"%(d)s['%(k)s']\" % vars())\n ##\n for i in dxdict.keys():\n for j in dxdict.keys():\n if \"Xdict['%(j)s']\" % vars() not in dxdict[i]:\n dxdict[i] = dxdict[i].replace(j, \"Xdict['%(j)s']\" % vars())\n modelprint, nstates = os.path.basename(modelfile), len(dxlist)",
"def import_opml(self, path):\n tree = ET.parse(path)\n root = tree.getroot()\n for feed_el in root.find('body').findall('outline'):\n if feed_el.get('type') == 'rss':\n self.add_by_url(feed_el.get('xmlUrl'), feed_el.get('text'))",
"def write(file_path, kml_str):\n\n fa.text_writer(file_path, kml_str)",
"def __invokeCdx2Cml(self, inputFile, outputDir):\n tmpFs = None\n try:\n tmpFs = self.iceContext.fs.unzipToTempDirectory(inputFile)\n toDir = tmpFs.absPath()\n tmpFs.unzipToDirectory(inputFile, toDir)\n tmpFs.makeDirectory(\"raw\")\n tmpFs.makeDirectory(\"cml\")\n cmdPath = self.__getChemDrawCmd()\n self.iceContext.system.execute2(cmdPath, \"-INDIR\", toDir,\n \"-INSUFF\", '\"\"',\n \"-RAWDIR\", \"../raw\",\n \"-RAWSUFF\", \".xml\",\n \"-OUTDIR\", \"../cml\", printErr = False)\n _, name, _ = tmpFs.splitPathFileExt(inputFile)\n for file in tmpFs.listFiles(\"cml\"):\n srcFile = tmpFs.join(\"cml\", file)\n outFile = tmpFs.join(outputDir, file.replace(\"Object \", name + \"-\"))\n tmpFs.copy(srcFile, outFile)\n finally:\n if tmpFs != None:\n tmpFs.delete()",
"def process(self):\n # Opening and preprocessing of the input file\n if self.options.mbtiles_fromdisk or self.options.mbtiles_todisk:\n if self.options.mbtiles_fromdisk:\n i_parm=10\n if self.options.mbtiles_todisk:\n i_parm=11\n if self.options.verbose:\n print \"GDAL2MbTiles :mbtiles from/to disk [\",i_parm,\"] mbtiles_fromdisk[\",self.options.mbtiles_fromdisk,\"] mbtiles_todisk[\",self.options.mbtiles_todisk,\"]\"\n self.mbtiles_setup(i_parm)\n return\n else:\n if self.options.verbose:\n print \"GDAL2MbTiles :tile creation mbtiles[\",self.options.mbtiles,\"]\"\n self.open_input()\n # Generation of main metadata files and HTML viewers\n self.generate_metadata()\n # Generation of the lowest tiles\n self.generate_base_tiles()\n # Generation of the overview tiles (higher in the pyramid)\n self.generate_overview_tiles()\n # Generating of KML\n self.generate_kml()",
"def to_oskar_telescope_model(self, filename):\n pass",
"def validate_kml():\n from pykml.parser import parse\n from optparse import OptionParser\n\n parser = OptionParser(\n usage=\"usage: %prog FILENAME_or_URL\",\n version=\"%prog 0.1\",\n )\n parser.add_option(\"--schema\", dest=\"schema_uri\",\n help=\"URI of the XML Schema Document used for validation\")\n (options, args) = parser.parse_args()\n if len(args) != 1:\n parser.error(\"wrong number of arguments\")\n else:\n uri = args[0]\n\n try:\n # try to open as a file\n fileobject = open(uri)\n except IOError:\n try:\n fileobject = urllib2.urlopen(uri)\n except ValueError:\n raise ValueError('Unable to load URI {0}'.format(uri))\n except:\n raise\n\n doc = parse(fileobject, schema=None)\n\n if options.schema_uri:\n schema = Schema(options.schema_uri)\n else:\n # by default, use the OGC base schema\n sys.stdout.write(\"Validating against the default schema: {0}\\n\".format(OGCKML_SCHEMA))\n schema = Schema(OGCKML_SCHEMA)\n\n sys.stdout.write(\"Validating document...\\n\")\n if schema.validate(doc):\n sys.stdout.write(\"Congratulations! The file is valid.\\n\")\n else:\n sys.stdout.write(\"Uh-oh! The KML file is invalid.\\n\")\n sys.stdout.write(schema.assertValid(doc))\n # close the fileobject, if needed\n try:\n fileobject\n except NameError:\n pass #variable was not defined\n else:\n fileobject.close",
"def __init__(self,kml_file):\n super().__init__()\n self.offset = False\n self.inputKML = openKML(kml_file)\n\n self.Documents = self._set_documents()\n self.Folders = self._set_folders(self.Documents)\n if self.Folders != None:\n self.Placemarks = self._set_placemarks(self.Folders)\n else:\n self.Placemarks = self._set_placemarks(self.Documents)\n\n self._set_dataframe()",
"def writer(output, output_name, output_data):\n\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if('Latitude' in exif.keys() and\n 'Latitude Reference' in exif.keys() and\n 'Longitude Reference' in exif.keys() and\n 'Longitude' in exif.keys()):\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'],\n description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output, output_name))",
"def import_musicxml_file_idea(scorePath, museScoreFile):\n\n myScore = m.converter.parse(scorePath+'/'+museScoreFile, format='musicxml')\n num_parts=get_number_of_parts(myScore)\n print(\"number_of_parts:\",num_parts)\n \n # Limit max Parts that can be processed \n if num_parts > 2:\n sys.exit(\"Error: this program can only process max 2 parts input musicxml file!\\nProgram aborted.\") \n\n # loop over Parts\n part_cnt=0\n music_info=dict()\n key=''\n for p in myScore.recurse().parts:\n for e in p.recurse().getElementsByClass('TimeSignature'): # meter.timeSignature:\n print(\"time signature score: \", e)\n used_time_signature = e # Because of grant staff only use the last\n key='time_signature'+str(part_cnt)\n print('key:', key)\n music_info[key]=used_time_signature\n print('music_info[key]:',music_info[key])\n\n for e in myScore.recurse().getElementsByClass('KeySignature'): # meter.timeSignature:\n print(\"key signature score: \", e)\n used_key_signature = e # Because of grant staff only use the last\n key='key_signature'+str(part_cnt)\n print('key:', key)\n music_info[key]=used_key_signature\n print('music_info[key]:',music_info[key])\n\n time_list = []\n note_property_list=[]\n smallest_quarterlength=sys.float_info.max\n\n for element in myScore.recurse().notes:\n # Encoding X\n # Fill time\n time_list.append(element.measureNumber) \n time_list.append(element.offset) \n #print(\"Time_list iter:\", time_list)\n \n # Encoding Y \n # Fill note properties\n note_property_list.append(nc.getNoteValue(element.name))\n note_property_list.append(element.octave)\n note_property_list.append(element.duration.quarterLength)\n # search smallest quarterlength\n if element.duration.quarterLength < smallest_quarterlength:\n smallest_quarterlength = element.duration.quarterLength\n #print(\"Note_property_list iter:\", note_property_list)\n \n \n used_smallest_quarterlength = smallest_quarterlength \n key='smallest_quarterlength'+str(part_cnt)\n print('key:', key)\n music_info[key]=used_smallest_quarterlength \n print('music_info[key]:',music_info[key])\n\n # Create 2 dimensional array for the time list with 2 elements per row\n # First index -1 creates dynamically an amount off rows based on the size of the time list\n X = np.array(time_list).reshape(-1, 2)\n #print(\"X.shape\",X.shape)\n #print(X)\n\n # put in music_info\n used_X = X \n key='X'+str(part_cnt)\n print('key:', key)\n music_info[key]=used_X\n print('music_info[key]:',music_info[key])\n \n # Create 2 dimension array for the note property list with 3 elements per row\n # First index -1 creates dynamically an amount off rows based on the size of the note list\n Y = np.array(note_property_list).reshape(-1, 3)\n #print(\"Y.shape\",Y.shape)\n #print(Y)\n\n used_Y = Y \n key='Y'+str(part_cnt)\n print('key:', key)\n music_info[key]=used_Y\n print('music_info[key]:',music_info[key])\n\n part_cnt=part_cnt+1\n\n '''\n # Get used TimeSignature of input file\n for e in myScore.recurse().getElementsByClass('TimeSignature'): # meter.timeSignature:\n print(\"time signature score: \", e)\n used_time_signature = e # Because of grant staff only use the last\n ''' \n \n '''\n # Get used KeySignature of input file\n for e in myScore.recurse().getElementsByClass('KeySignature'): # meter.timeSignature:\n print(\"key signature score: \", e)\n used_key_signature = e # Because of grant staff only use the last\n '''\n\n ''' \n time_list = []\n note_property_list=[]\n smallest_quarterlength=sys.float_info.max\n '''\n \n '''\n for element in myScore.recurse().notes:\n 
# Encoding X\n # Fill time\n time_list.append(element.measureNumber) \n time_list.append(element.offset) \n #print(\"Time_list iter:\", time_list)\n \n # Encoding Y \n # Fill note properties\n note_property_list.append(nc.getNoteValue(element.name))\n note_property_list.append(element.octave)\n note_property_list.append(element.duration.quarterLength)\n # search smallest quarterlength\n if element.duration.quarterLength < smallest_quarterlength:\n smallest_quarterlength = element.duration.quarterLength\n #print(\"Note_property_list iter:\", note_property_list)\n \n # Create 2 dimensional array for the time list with 2 elements per row\n # First index -1 creates dynamically an amount off rows based on the size of the time list\n X = np.array(time_list).reshape(-1, 2)\n #print(\"X.shape\",X.shape)\n #print(X)\n \n # Create 2 dimension array for the note property list with 3 elements per row\n # First index -1 creates dynamically an amount off rows based on the size of the note list\n Y = np.array(note_property_list).reshape(-1, 3)\n #print(\"Y.shape\",Y.shape)\n #print(Y)\n '''\n \n '''\n return(X, Y, used_time_signature, used_key_signature, smallest_quarterlength) # import_musicxml_file_idea \n '''\n return(music_info) # import_musicxml_file_idea ",
"def convert_xml_to_unp(xml_root_path, unp_root_path):\n # check xml_root_path\n if \"lineStrokes\" not in xml_root_path:\n logger.error(\n \"Because of hard-coded path config, you should use `lineStrokes` \"\n \"directory for `xml_root_path` for now\"\n )\n\n # start!\n xml_names = get_specified_ext_fnames(xml_root_path, \".xml\")\n logger.info(f\"{len(xml_names)} files found!\")\n\n all_strokes = {}\n for fname in tqdm(xml_names):\n all_strokes[fname] = load_org_xml(fname)\n\n for fname in tqdm(xml_names):\n # ----------------\n # HARD-coded here!\n savename = fname.replace(xml_root_path, unp_root_path).replace('.xml', '.unp')\n # ----------------\n\n os.makedirs(os.path.dirname(savename), exist_ok=True)\n ds.save_sngyo_unp(all_strokes[fname], savename)\n\n logger.info(\"IAMONLINE's xml files are converted to unp files\")",
"def __extractChemDrawLocal(self, data, outputDir, name):\n tmpFs = self.iceContext.fs.createTempDirectory()\n tmpFs.writeFile(name, data)\n inputFile = tmpFs.absPath(name)\n \n if outputDir is None:\n basePath = tmpFs.absPath()\n tmpFs.makeDirectory(\"media\")\n outputDir = self.iceContext.fs.join(basePath, \"media\")\n \n self.__invokeCdx2Cml(inputFile, outputDir)\n \n tmpFs.zip(\"media.zip\", \"media\")\n content = tmpFs.readFile(\"media.zip\")\n tmpFs.delete()\n \n return content",
"def import_musicxml_file(scorePath, museScoreFile):\n\n myScore = m.converter.parse(scorePath+'/'+museScoreFile, format='musicxml')\n num_parts=get_number_of_parts(myScore)\n print(\"number_of_parts:\",num_parts)\n \n # Limit max Parts that can be processed \n if num_parts > 2:\n sys.exit(\"Error: this program can only process max 2 parts input musicxml file!\\nProgram aborted.\") \n\n\n # Get used TimeSignature of input file\n for e in myScore.recurse().getElementsByClass('TimeSignature'): # meter.timeSignature:\n print(\"time signature score: \", e)\n used_time_signature = e # Because of grant staff only use the last\n\n # Get used KeySignature of input file\n for e in myScore.recurse().getElementsByClass('KeySignature'): # meter.timeSignature:\n print(\"key signature score: \", e)\n used_key_signature = e # Because of grant staff only use the last\n\n time_list = []\n note_property_list=[]\n smallest_quarterlength=sys.float_info.max\n\n for element in myScore.recurse().notes:\n # Encoding X\n # Fill time\n time_list.append(element.measureNumber) \n time_list.append(element.offset) \n #print(\"Time_list iter:\", time_list)\n \n # Encoding Y \n # Fill note properties\n note_property_list.append(nc.getNoteValue(element.name))\n note_property_list.append(element.octave)\n note_property_list.append(element.duration.quarterLength)\n # search smallest quarterlength\n if element.duration.quarterLength < smallest_quarterlength:\n smallest_quarterlength = element.duration.quarterLength\n #print(\"Note_property_list iter:\", note_property_list)\n \n # Create 2 dimensional array for the time list with 2 elements per row\n # First index -1 creates dynamically an amount off rows based on the size of the time list\n X = np.array(time_list).reshape(-1, 2)\n #print(\"X.shape\",X.shape)\n #print(X)\n \n # Create 2 dimension array for the note property list with 3 elements per row\n # First index -1 creates dynamically an amount off rows based on the size of the note list\n Y = np.array(note_property_list).reshape(-1, 3)\n #print(\"Y.shape\",Y.shape)\n #print(Y)\n \n return(X, Y, used_time_signature, used_key_signature, smallest_quarterlength) # import_musicxml_file ",
"def facilities_as_kml(facilities):\n return KML.Folder(*[facility_as_kml(facility) for facility in facilities])",
"def load_ktrans(path):\n ext = \"-Ktrans.mhd\"\n img = sitk.ReadImage(path + ext)\n\n # Order z,y,x\n arr = sitk.GetArrayFromImage(img)\n origin = np.array(list(reversed(img.GetOrigin())))\n scale = np.array(list(reversed(img.GetSpacing())))\n arr = rescale_arr(arr, scale)\n\n return arr",
"def open_input(self):\n gdal.SetConfigOption(\"GDAL_PAM_ENABLED\", \"YES\")\n gdal.AllRegister()\n # self.options.verbose=True\n if self.options.tms_osm:\n self.s_y_type=\"osm\"\n else:\n self.s_y_type=\"tms\"\n if self.options.verbose:\n print \"open_input :\", self.input,\" osm[\",self.options.tms_osm,\",\",self.s_y_type,\"] mbtiles[\",self.options.mbtiles,\"] mbtiles_todisk[\",self.options.mbtiles_todisk,\"] mbtiles_fromdisk[\",self.options.mbtiles_fromdisk,\"]\";\n # Open the input file\n if self.input:\n self.in_ds = gdal.Open(self.input, gdal.GA_ReadOnly)\n else:\n raise Exception(\"No input file was specified\")\n\n if self.options.verbose:\n print \"Input file:\", \"( %sP x %sL - %s bands)\" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, self.in_ds.RasterCount)\n\n if not self.in_ds:\n # Note: GDAL prints the ERROR message too\n self.error(\"It is not possible to open the input file '%s'.\" % self.input )\n\n # Read metadata from the input file\n if self.in_ds.RasterCount == 0:\n self.error( \"Input file '%s' has no raster band\" % self.input )\n\n if self.in_ds.GetRasterBand(1).GetRasterColorTable():\n # TODO: Process directly paletted dataset by generating VRT in memory\n self.error( \"Please convert this file to RGB/RGBA and run gdal2mbtiles on the result.\",\n \"\"\"From paletted file you can create RGBA file (temp.vrt) by:\ngdal_translate -of vrt -expand rgba %s temp.vrt\nthen run:\ngdal2mbtiles temp.vrt\"\"\" % self.input )\n\n # Get NODATA value\n # User supplied values overwrite everything else.\n if self.options.srcnodata is not None:\n nds = map(float, self.options.srcnodata.split(','))\n if len(nds) < self.in_ds.RasterCount:\n self.in_nodata = (nds * self.in_ds.RasterCount)[:self.in_ds.RasterCount]\n else:\n self.in_nodata = nds\n else:\n # If the source dataset has NODATA, use it.\n self.in_nodata = []\n for i in range(1, self.in_ds.RasterCount+1):\n if self.in_ds.GetRasterBand(i).GetNoDataValue() != None:\n self.in_nodata.append( self.in_ds.GetRasterBand(i).GetNoDataValue() )\n\n if self.options.verbose:\n print \"NODATA: %s\" % self.in_nodata\n\n # INIT DEST\n if self.options.init_dest is not None:\n if self.options.tile_format == \"jpeg\":\n if self.in_ds.RasterCount == 4:\n nbands = 3\n else:\n nbands = self.in_ds.RasterCount\n\n nds = map(float, self.options.init_dest.split(','))\n\n if len(nds) == 1:\n init_dest = nds * nbands\n elif len(nds) == nbands:\n init_dest = nds\n else:\n print \"WARNING: you suplied %d '--init-dest' values but the dataset has %d data bands\" % (len(nds), nbands)\n init_dest = None\n else:\n init_dest = None\n print \"WARNING: --init-dest can be used only with 'jpeg' tile format\"\n else:\n if self.options.tile_format == \"jpeg\":\n init_dest = [255,255,255]\n else:\n init_dest = None\n\n #\n # Here we should have RGBA input dataset opened in self.in_ds\n #\n\n if self.options.verbose:\n print \"Preprocessed file:\", \"( %sP x %sL - %s bands)\" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, self.in_ds.RasterCount)\n\n # Spatial Reference System of the input raster\n\n\n self.in_srs = None\n\n if self.options.s_srs:\n self.in_srs = osr.SpatialReference()\n self.in_srs.SetFromUserInput(self.options.s_srs)\n self.in_srs_wkt = self.in_srs.ExportToWkt()\n else:\n self.in_srs_wkt = self.in_ds.GetProjection()\n if not self.in_srs_wkt and self.in_ds.GetGCPCount() != 0:\n self.in_srs_wkt = self.in_ds.GetGCPProjection()\n if self.in_srs_wkt:\n self.in_srs = osr.SpatialReference()\n self.in_srs.ImportFromWkt(self.in_srs_wkt)\n #elif 
self.options.profile != 'raster':\n # self.error(\"There is no spatial reference system info included in the input file.\",\"You should run gdal2mbtiles with --s_srs EPSG:XXXX or similar.\")\n\n # Spatial Reference System of tiles\n\n self.out_srs = osr.SpatialReference()\n\n if self.options.profile == 'mercator':\n self.out_srs.ImportFromEPSG(900913)\n elif self.options.profile in ('geodetic', 'gearth', 'garmin'):\n self.out_srs.ImportFromEPSG(4326)\n else:\n self.out_srs = self.in_srs\n\n # Are the reference systems the same? Reproject if necessary.\n\n self.out_ds = None\n\n if self.options.profile in ('mercator', 'geodetic', 'gearth', 'garmin'):\n\n if (self.in_ds.GetGeoTransform() == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) and (self.in_ds.GetGCPCount() == 0):\n self.error(\"There is no georeference - neither affine transformation (worldfile) nor GCPs. You can generate only 'raster' profile tiles.\",\n \"Either gdal2mbtiles with parameter -p 'raster' or use another GIS software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs\")\n\n if self.in_srs:\n\n if (self.in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or (self.in_ds.GetGCPCount() != 0):\n\n # Generation of VRT dataset in tile projection, default 'nearest neighbour' warping\n self.out_ds = gdal.AutoCreateWarpedVRT( self.in_ds, self.in_srs_wkt, self.out_srs.ExportToWkt() )\n\n # TODO: HIGH PRIORITY: Correction of AutoCreateWarpedVRT according the max zoomlevel for correct direct warping!!!\n\n if self.options.verbose:\n print \"Warping of the raster by AutoCreateWarpedVRT (result saved into 'tiles.vrt')\"\n self.out_ds.GetDriver().CreateCopy(\"tiles.vrt\", self.out_ds)\n\n # Note: self.in_srs and self.in_srs_wkt contain still the non-warped reference system!!!\n\n # Correction of AutoCreateWarpedVRT for NODATA values\n if self.in_nodata != []:\n import tempfile\n tempfilename = tempfile.mktemp('-gdal2mbtiles.vrt')\n self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds)\n # open as a text file\n s = open(tempfilename).read()\n # Add the warping options\n s = s.replace(\"\"\"<GDALWarpOptions>\"\"\",\"\"\"<GDALWarpOptions>\n <Option name=\"UNIFIED_SRC_NODATA\">YES</Option>\n <Option name=\"INIT_DEST\">NO_DATA</Option>\"\"\")\n # replace BandMapping tag for NODATA bands....\n if init_dest is None:\n dstnodata = self.in_nodata\n else:\n dstnodata = init_dest\n for i in range(len(self.in_nodata)):\n s = s.replace(\"\"\"<BandMapping src=\"%i\" dst=\"%i\"/>\"\"\" % ((i+1),(i+1)),\"\"\"<BandMapping src=\"%i\" dst=\"%i\">\n <SrcNoDataReal>%i</SrcNoDataReal>\n <SrcNoDataImag>0</SrcNoDataImag>\n <DstNoDataReal>%i</DstNoDataReal>\n <DstNoDataImag>0</DstNoDataImag>\n </BandMapping>\"\"\" % ((i+1), (i+1), self.in_nodata[i], dstnodata[i]))\n # save the corrected VRT\n open(tempfilename,\"w\").write(s)\n # open by GDAL as self.out_ds\n self.out_ds = gdal.Open(tempfilename) #, gdal.GA_ReadOnly)\n # delete the temporary file\n os.unlink(tempfilename)\n\n # set NODATA_VALUE metadata\n self.out_ds.SetMetadataItem('NODATA_VALUES','%s' % \" \".join(str(int(f)) for f in self.in_nodata))\n\n if self.options.verbose:\n print \"Modified warping result saved into 'tiles1.vrt'\"\n open(\"tiles1.vrt\",\"w\").write(s)\n\n # -----------------------------------\n # Correction of AutoCreateWarpedVRT for Mono (1 band) and RGB (3 bands) files without NODATA:\n # equivalent of gdalwarp -dstalpha\n elif self.in_nodata == [] and self.out_ds.RasterCount in (1,3):\n import tempfile\n tempfilename = tempfile.mktemp('-gdal2mbtiles.vrt')\n 
self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds)\n # open as a text file\n s = open(tempfilename).read()\n # Add the warping options\n s = s.replace(\"\"\"<BlockXSize>\"\"\",\"\"\"<VRTRasterBand dataType=\"Byte\" band=\"%i\" subClass=\"VRTWarpedRasterBand\">\n <ColorInterp>Alpha</ColorInterp>\n </VRTRasterBand>\n <BlockXSize>\"\"\" % (self.out_ds.RasterCount + 1))\n s = s.replace(\"\"\"</GDALWarpOptions>\"\"\", \"\"\"<DstAlphaBand>%i</DstAlphaBand>\n </GDALWarpOptions>\"\"\" % (self.out_ds.RasterCount + 1))\n if init_dest is None:\n init_dest_str = \"0\"\n else:\n init_dest_str = \",\".join(str(f) for f in init_dest)\n s = s.replace(\"\"\"</WorkingDataType>\"\"\", \"\"\"</WorkingDataType>\n <Option name=\"INIT_DEST\">%s</Option>\"\"\" % init_dest_str)\n # save the corrected VRT\n open(tempfilename,\"w\").write(s)\n # open by GDAL as self.out_ds\n self.out_ds = gdal.Open(tempfilename) #, gdal.GA_ReadOnly)\n # delete the temporary file\n os.unlink(tempfilename)\n\n if self.options.verbose:\n print \"Modified -dstalpha warping result saved into 'tiles1.vrt'\"\n open(\"tiles1.vrt\",\"w\").write(s)\n\n elif init_dest is not None:\n import tempfile\n tempfilename = tempfile.mktemp('-gdal2mbtiles.vrt')\n self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds)\n # open as a text file\n s = open(tempfilename).read()\n # Add the warping options\n s = s.replace(\"\"\"</WorkingDataType>\"\"\", \"\"\"</WorkingDataType>\n <Option name=\"INIT_DEST\">%s</Option>\"\"\" % \",\".join(str(f) for f in init_dest))\n # save the corrected VRT\n open(tempfilename,\"w\").write(s)\n # open by GDAL as self.out_ds\n self.out_ds = gdal.Open(tempfilename) #, gdal.GA_ReadOnly)\n # delete the temporary file\n os.unlink(tempfilename)\n\n if self.options.verbose:\n print \"Modified warping result saved into 'tiles1.vrt'\"\n open(\"tiles1.vrt\",\"w\").write(s)\n\n # For raster with 4-bands: 4th unknown band set to alpha\n if (self.out_ds.RasterCount == 4\n and self.out_ds.GetRasterBand(4).GetRasterColorInterpretation() == gdal.GCI_Undefined):\n self.out_ds.GetRasterBand(4).SetRasterColorInterpretation(gdal.GCI_AlphaBand)\n\n s = '''\n '''\n\n else:\n self.error(\"Input file has unknown SRS.\", \"Use --s_srs ESPG:xyz (or similar) to provide source reference system.\" )\n\n if self.out_ds and self.options.verbose:\n print \"Projected file:\", \"tiles.vrt\", \"( %sP x %sL - %s bands)\" % (self.out_ds.RasterXSize, self.out_ds.RasterYSize, self.out_ds.RasterCount)\n\n if not self.out_ds:\n self.out_ds = self.in_ds\n\n #\n # Here we should have a raster (out_ds) in the correct Spatial Reference system\n #\n\n # KML test\n self.isepsg4326 = False\n srs4326 = osr.SpatialReference()\n srs4326.ImportFromEPSG(4326)\n if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4():\n self.kml = True\n self.isepsg4326 = True\n if self.options.verbose:\n print \"KML autotest OK!\"\n\n # Read the georeference\n\n self.out_gt = self.out_ds.GetGeoTransform()\n\n #originX, originY = self.out_gt[0], self.out_gt[3]\n #pixelSize = self.out_gt[1] # = self.out_gt[5]\n\n # Test the size of the pixel\n\n # MAPTILER - COMMENTED\n #if self.out_gt[1] != (-1 * self.out_gt[5]) and self.options.profile != 'raster':\n # TODO: Process corectly coordinates with are have swichted Y axis (display in OpenLayers too)\n #self.error(\"Size of the pixel in the output differ for X and Y axes.\")\n\n # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile)\n if (self.out_gt[2], self.out_gt[4]) != 
(0,0):\n self.error(\"Georeference of the raster contains rotation or skew. Such raster is not supported. Please use gdalwarp first.\")\n # TODO: Do the warping in this case automaticaly\n\n #\n # Here we expect: pixel is square, no rotation on the raster\n #\n\n # Output Bounds - coordinates in the output SRS\n self.ominx = self.out_gt[0]\n self.omaxx = self.out_gt[0]+self.out_ds.RasterXSize*self.out_gt[1]\n self.omaxy = self.out_gt[3]\n self.ominy = self.out_gt[3]-self.out_ds.RasterYSize*self.out_gt[1]\n # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15\n # user defined bounds to extract - coordinates in the output SRS\n if self.options.te_bounds != '':\n if self.te_minx >= self.ominx and self.te_minx <= self.omaxx:\n if self.te_maxx >= self.ominx and self.te_maxx <= self.omaxx:\n if self.te_miny >= self.ominy and self.te_miny <= self.omaxy:\n if self.te_maxy >= self.ominy and self.te_maxy <= self.omaxy:\n # replace only if inside the read bounds\n self.ominx = self.te_minx\n self.omaxx = self.te_maxx\n self.ominy = self.te_miny\n self.omaxy = self.te_maxy\n if self.options.verbose:\n print \"User defined Bounds (output srs) have been set:\", round(self.ominx, 13), self.ominy, self.omaxx, self.omaxy\n\n if self.options.verbose:\n print \"Bounds (output srs):\", round(self.ominx, 13), self.ominy, self.omaxx, self.omaxy\n\n if self.options.mbtiles:\n self.options.profile = 'mercator'\n if self.options.profile == 'mercator':\n self.mercator = GlobalMercator(self.options.tms_osm) # from globalmaptiles.py\n\n #\n # Calculating ranges for tiles in different zoom levels\n #\n\n # Function which generates SWNE in LatLong for given tile\n self.tileswne = self.mercator.TileLatLonBounds\n\n # Generate table with min max tile coordinates for all zoomlevels\n self.tminmax = range(0,32)\n for tz in range(0, 32):\n tminx, tminy = self.mercator.MetersToTile( self.ominx, self.ominy, tz )\n tmaxx, tmaxy = self.mercator.MetersToTile( self.omaxx, self.omaxy, tz )\n # crop tiles extending world limits (+-180,+-90)\n tminx, tminy = max(0, tminx), max(0, tminy)\n tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy)\n self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)\n\n # TODO: Maps crossing 180E (Alaska?)\n\n # Get the minimal zoom level (map covers area equivalent to one tile)\n if self.tminz == None:\n self.tminz = self.mercator.ZoomForPixelSize( self.out_gt[1] * max( self.out_ds.RasterXSize, self.out_ds.RasterYSize) / float(self.tilesize) )\n\n # Get the maximal zoom level (closest possible zoom level up on the resolution of raster)\n if self.tmaxz == None:\n self.tmaxz = self.mercator.ZoomForPixelSize( self.out_gt[1] )\n\n if self.options.verbose:\n print \"Bounds (latlong):\", self.mercator.MetersToLatLon( self.ominx, self.ominy), self.mercator.MetersToLatLon( self.omaxx, self.omaxy)\n print 'MinZoomLevel:', self.tminz\n print \"MaxZoomLevel:\", self.tmaxz, \"(\", self.mercator.Resolution( self.tmaxz ),\")\"\n\n # this must be call befor ImageOutput is called (self.output may be changed)\n if self.options.mbtiles:\n if not self.mbtiles_db:\n self.mbtiles_setup(1);\n\n # Instantiate image output.\n self.image_output = ImageOutput(self.options.tile_format, self.out_ds, self.tilesize,\n self.options.resampling, init_dest, self.output,\n self.options.verbose,self.options.mbtiles)\n if self.options.profile == 'geodetic':\n\n self.geodetic = GlobalGeodetic() # from globalmaptiles.py\n\n # Function which generates SWNE in LatLong for given tile\n self.tileswne = 
self.geodetic.TileLatLonBounds\n\n # Generate table with min max tile coordinates for all zoomlevels\n self.tminmax = range(0,32)\n for tz in range(0, 32):\n tminx, tminy = self.geodetic.LatLonToTile( self.ominx, self.ominy, tz )\n tmaxx, tmaxy = self.geodetic.LatLonToTile( self.omaxx, self.omaxy, tz )\n # crop tiles extending world limits (+-180,+-90)\n tminx, tminy = max(0, tminx), max(0, tminy)\n tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy)\n self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)\n\n # TODO: Maps crossing 180E (Alaska?)\n\n # Get the maximal zoom level (closest possible zoom level up on the resolution of raster)\n if self.tminz == None:\n self.tminz = self.geodetic.ZoomForPixelSize( self.out_gt[1] * max( self.out_ds.RasterXSize, self.out_ds.RasterYSize) / float(self.tilesize) )\n\n # Get the maximal zoom level (closest possible zoom level up on the resolution of raster)\n if self.tmaxz == None:\n self.tmaxz = self.geodetic.ZoomForPixelSize( self.out_gt[1] )\n\n if self.options.verbose:\n print \"Bounds (latlong):\", self.ominx, self.ominy, self.omaxx, self.omaxy\n\n if self.options.profile in ('raster', 'gearth', 'garmin'):\n\n log2 = lambda x: math.log10(x) / math.log10(2) # log2 (base 2 logarithm)\n\n self.nativezoom = int(max( math.ceil(log2(self.out_ds.RasterXSize/float(self.tilesize))),\n math.ceil(log2(self.out_ds.RasterYSize/float(self.tilesize)))))\n\n if self.options.verbose:\n print \"Native zoom of the raster:\", self.nativezoom\n\n # Get the minimal zoom level (whole raster in one tile)\n if self.tminz == None:\n self.tminz = 0\n\n # Get the maximal zoom level (native resolution of the raster)\n if self.tmaxz == None:\n self.tmaxz = self.nativezoom\n\n # Garmin has maximally 100 tiles - lower the tmaxz if necessary\n if self.options.profile == 'garmin':\n tno = math.ceil(self.out_ds.RasterXSize / self.tilesize) * math.ceil(self.out_ds.RasterYSize / self.tilesize)\n for tz in range(self.tmaxz, 1, -1):\n if tno > 100:\n tno /= 4\n self.tmaxz -= 1\n print \"Warning: GARMIN has a limit 100 tiles per device: lowering the max zoom level to:\", self.tmaxz\n else:\n continue\n\n # Force only one zoom level for the 'garmin' tile profile\n if self.options.profile == 'garmin':\n self.tminz = self.tmaxz\n\n # Generate table with min max tile coordinates for all zoomlevels\n self.tminmax = range(0, self.tmaxz+1)\n self.tsize = range(0, self.tmaxz+1)\n for tz in range(0, self.tmaxz+1):\n tsize = 2.0**(self.nativezoom-tz)*self.tilesize\n tminx, tminy = 0, 0\n tmaxx = int(math.ceil( self.out_ds.RasterXSize / tsize )) - 1\n tmaxy = int(math.ceil( self.out_ds.RasterYSize / tsize )) - 1\n self.tsize[tz] = math.ceil(tsize)\n self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)\n\n # Function which generates SWNE in LatLong for given tile\n if self.kml and self.in_srs_wkt:\n self.ct = osr.CoordinateTransformation(self.in_srs, srs4326)\n def rastertileswne(x,y,z):\n pixelsizex = (2**(self.nativezoom-z) * self.out_gt[1]) # X-pixel size in level\n pixelsizey = (2**(self.nativezoom-z) * self.out_gt[5]) # Y-pixel size in level (usually -1*pixelsizex)\n west = self.out_gt[0] + x*self.tilesize*pixelsizex\n east = west + self.tilesize*pixelsizex\n south = self.ominy + y*self.tilesize*pixelsizex\n north = south + self.tilesize*pixelsizex\n if not self.isepsg4326:\n # Transformation to EPSG:4326 (WGS84 datum)\n west, south = self.ct.TransformPoint(west, south)[:2]\n east, north = self.ct.TransformPoint(east, north)[:2]\n return south, west, north, east\n\n self.tileswne = 
rastertileswne\n else:\n self.tileswne = lambda x, y, z: (0,0,0,0)",
"def file(self,file):\n self.lib.lammps_file(self.lmp,file.encode('utf-8'))",
"def convert_testing_data(mfccPath):\n inputlist, inputnamelist = ark_parser(mfccPath, 'test.ark')\n\n print(\"%d sample in testing set\" % len(inputlist))\n with open('./test_data.pkl', 'wb') as test_data:\n pickle.dump(inputlist, test_data)\n \n with open('./test_name.pkl', 'wb') as test_name:\n pickle.dump(inputnamelist, test_name)",
"def get_kml_dict(self, tx, ty_tms, tz, image_format, draworder = 0):\n d = {}\n\n d[\"south\"], d[\"west\"], d[\"north\"], d[\"east\"] = self.tileswne(tx, ty_tms, tz)\n\n image_filename = get_tile_filename(tx, ty_tms, tz, format_extension[image_format],False)\n d[\"image_filename\"] = image_filename\n d[\"image_filename\"] = d[\"image_filename\"].replace(\"\\\\\",\"/\")\n\n if self.options.url is None:\n d[\"image_url\"] = \"../../%s\" % image_filename\n else:\n d[\"image_url\"] = \"%s%s\" % (self.options.url, image_filename)\n d[\"image_url\"] = d[\"image_url\"].replace(\"\\\\\",\"/\")\n\n url = self.options.url\n if url is None:\n # Top level KML is linked from `doc.kml' and it needs different path.\n if tz == self.tminz:\n url = \"\"\n else:\n url = \"../../\"\n\n if self.options.kmz:\n extension = \"kmz\"\n else:\n extension = \"kml\"\n\n d[\"link_url\"] = \"%s%s\" % (url, get_tile_filename(tx, ty_tms, tz, extension,False))\n d[\"link_url\"] = d[\"link_url\"].replace(\"\\\\\",\"/\")\n\n d[\"minlodpixels\"] = int(self.tilesize / 2)\n d[\"maxlodpixels\"] = -1 # int(self.tilesize * 8)\n\n if tx == 0:\n d[\"draw_order\"] = draworder + 2 * tz + 1\n else:\n d[\"draw_order\"] = draworder + 2 * tz\n\n return d",
"def build(filename=\"JMdict_e.gz\", output_filename=DATABASE_FILENAME):\n # NOTE: The JMdict XML file contains XML entities, that are expanded when\n # parsed using Python's stdlib xml.etree.ElementTree like so:\n # ElementTree.parse(f). That is undesired behavior for our use-case. Oshi\n # needs to parse the short entity string, for example &adj-i; should be\n # \"adj-i\" instead of \"adjective (keiyoushi)\". That's why it uses an external\n # xml parser: lxml that allows you to specify whether to expand entites.\n extension = path.splitext(filename)[1].lower()\n parser = etree.XMLParser(resolve_entities=False)\n if extension == \".gz\":\n with gzip.open(filename) as f:\n tree = etree.parse(f, parser)\n elif extension == \".xml\":\n tree = etree.parse(filename, parser)\n else:\n raise ValueError(\"File extension not supported: \" + extension)\n\n entries = []\n # variables starting with x contain xml element(s)\n for xentry in tree.getroot():\n entry = {}\n entry[\"writings\"] = [x.find('keb').text for x in xentry.findall('k_ele')]\n entry[\"readings\"] = [x.find('reb').text for x in xentry.findall('r_ele')]\n xsenses = xentry.findall('sense')\n senses = []\n # last_tags will contain a reference to previously found tags (JMdict\n # specifies that when pos is empty, the previous one should be used)\n last_tags = []\n for xsense in xsenses:\n tags = []\n xtags = xsense.findall('pos') # + xsense.findall('misc')\n for xtag in xtags:\n match = re.search(r'&([\\w-]+?);', etree.tostring(xtag, encoding=\"utf-8\").decode('utf-8') or \"\")\n if match: tags.append(match.group(1))\n glosses = [x.text for x in xsense.findall('gloss')]\n senses.append({\"glosses\": glosses, \"tags\": tags or last_tags})\n last_tags = tags or last_tags\n entry[\"senses\"] = senses\n entries.append(entry)\n with open(output_filename, 'w', encoding='utf-8') as f:\n json.dump(entries, f, ensure_ascii=False)",
"def k2lc(epic):\n prefix = epic[:4]\n id = epic[4:]\n c = \"01\"\n path = \"data/c01/{0}00000/{1}\".format(prefix, id)\n end = \"kepler_v1.0_lc.fits\"\n file = \"{0}/hlsp_everest_k2_llc_{1}-c{2}_{3}\".format(path, epic, c, end)\n x, y = process_data(file)\n return x, y",
"def makeModel(self):\n\n # Get the script\n modelScript = os.path.join(self.datapath, 'make3FGLxml.py')\n if not os.path.isfile(modelScript):\n # download it\n print(\"\\t=== Downloading make3FGLxml.py ===\")\n os.system('wget https://fermi.gsfc.nasa.gov/ssc/data/analysis/user/make3FGLxml.py -O {}'.format(modelScript))\n\n # Create the model using Tyrel's script\n galModel = os.path.join(self.diffpath, 'gll_iem_v06.fits')\n isoModel = os.path.join(self.diffpath, 'iso_'+self.irf+'_v06.txt')\n if (not os.path.isfile(galModel)) or (not os.path.isfile(isoModel)):\n print(\"\\t=== Unable to find the diffuse models, check the variable '$FERMI_DIR' ===\")\n return\n if not os.path.isdir(self.extpath):\n print(\"\\t=== Unable to find models of extended sources, check the variable '$LATEXTDIR' ===\")\n return\n if not os.path.isfile(self.fermicat):\n # download it\n print(\"\\t=== Downloading 3FGL catalog ===\")\n os.system('wget https://fermi.gsfc.nasa.gov/ssc/data/access/lat/4yr_catalog/gll_psc_v16.fit -O {}'.format(self.fermicat))\n\n os.popen(\"python {} {} {} -o {} -G {} -g 'gll_iem_v06'\\\n -I {} -i 'iso_source_v06' -e {} -r 5 -R 10 -ER 10\\\n -s 9 -m False -GIF False\".format(modelScript, self.fermicat,\n self.ft1, self.model, galModel, isoModel, self.extpath))\n\n # Add the target to the model\n tmpName = self.model + '.tmp'\n rfil = open(self.model, 'r')\n wfil = open(tmpName, 'w')\n # Copy the XML to the temporary model\n wfil.writelines([l for l in rfil.readlines() if not l=='</source_library>']) # copy everything but the last line\n wfil.write(' <source ROI_Center_Distance=\"0.00\" name=\"TARGET\" type=\"PointSource\">\\n')\n wfil.write(' <spectrum type=\"PowerLaw2\">\\n')\n wfil.write(' <parameter free=\"1\" max=\"1000\" min=\"1e-05\" name=\"Integral\" scale=\"1e-08\" value=\"0.3591824258\"/>\\n')\n wfil.write(' <parameter free=\"1\" max=\"1\" min=\"-5\" name=\"Index\" scale=\"1\" value=\"-2.7\"/>\\n')\n wfil.write(' <parameter free=\"0\" max=\"1000000\" min=\"20\" name=\"LowerLimit\" scale=\"1\" value=\"100\"/>\\n')\n wfil.write('<parameter free=\"0\" max=\"1000000\" min=\"20\" name=\"UpperLimit\" scale=\"1\" value=\"100000\"/>\\n')\n wfil.write(' </spectrum>\\n')\n wfil.write(' <spatialModel type=\"SkyDirFunction\">\\n')\n wfil.write(' <parameter free=\"0\" max=\"360.0\" min=\"-360.0\" name=\"RA\" scale=\"1.0\" value=\"'+str(self.ra)+'\"/>\\n')\n wfil.write(' <parameter free=\"0\" max=\"360.0\" min=\"-360.0\" name=\"DEC\" scale=\"1.0\" value=\"'+str(self.dec)+'\"/>\\n')\n wfil.write(' </spatialModel>\\n')\n wfil.write(' </source>\\n')\n wfil.write('</source_library>\\n')\n rfil.close()\n wfil.close()\n\n os.remove(self.model)\n os.rename(tmpName, self.model)\n \n print(\"\\t=== Source model {} added ===\".format(self.model))\n return",
"def convert_poi(input, output, map_category=(), map_icon=(), ignore_tags=()):\n\n # some tools write v2, others v3. Use RegExp to find which\n rx = re.compile(\"xmlns:([^= ]+) *=['\\\"]([^'\\\"]+/GpxExtensions/[^'\\\"]+)\")\n m = None\n for line in input:\n m = rx.search(line)\n if m:\n break\n\n if m:\n gpxx_ns = m.group(1)\n gpxx_url = m.group(2)\n logging.info(\"Found XML Namespace %s=%s\", gpxx_ns, gpxx_url)\n else:\n gpxx_ns = \"gpxx\"\n gpxx_url = \"http://www.garmin.com/xmlschemas/GpxExtensions/v3\"\n logging.warning(\"No XML Namespace for GpxExtensions! Using %s=%s\",\n gpxx_ns, gpxx_url)\n\n ns = {\n \"xsi\": \"http://www.w3.org/2001/XMLSchema-instance\",\n \"gpx\": \"http://www.topografix.com/GPX/1/1\",\n \"gpxd\": \"http://www.daimler.com/DaimlerGPXExtensions/V2.7.2\",\n gpxx_ns: gpxx_url\n }\n for k, v in ns.items():\n ET.register_namespace(k, v)\n\n\n logging.info(\"Parsing %s\", input)\n input.seek(0)\n indoc = ET.parse(input)\n outroot = ET.Element(\"{http://www.topografix.com/GPX/1/1}gpx\", attrib={\n \"version\": \"1.1\",\n \"creator\": indoc.getroot().get(\"creator\",\n \"gpx-poi-garmin-to-mercedesbenz\"),\n \"{http://www.w3.org/2001/XMLSchema-instance}schemaLocation\": \"http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd\",\n })\n\n it = []\n for i in ignore_tags:\n if i.startswith(\"{\"):\n it.append(i)\n else:\n i = i.split(':', 1)\n if len(i) == 1:\n i = ('gpx', i[0])\n it_ns = ns.get(i[0], i[0])\n it_tag = i[1]\n it.append(\"{%s}%s\" % (it_ns, it_tag))\n\n ignore_tags = it\n\n logging.info(\"Converting...\")\n for wpt in indoc.findall(\"gpx:wpt\", ns):\n wpt_out = ET.Element(wpt.tag, attrib=wpt.attrib)\n link_href = \"\"\n for cel in wpt:\n if cel.tag == \"{http://www.topografix.com/GPX/1/1}link\":\n link_href = cel.get(\"href\", \"\")\n\n for i in ignore_tags:\n if cel.tag == i:\n break\n else:\n if cel.tag == \"{http://www.topografix.com/GPX/1/1}extensions\":\n _convert_wpt_extension(cel, wpt_out, ns, link_href,\n map_category, map_icon, ignore_tags)\n else:\n _copy_gpx_tags(cel, wpt_out)\n\n if len(wpt_out):\n outroot.append(wpt_out)\n\n if len(outroot) > 30000:\n logging.warning(\"Writing %d entries (>30000!) to %s\", len(outroot), output)\n else:\n logging.info(\"Writing %d entries to %s\", len(outroot), output)\n ET.ElementTree(outroot).write(output, encoding=\"utf-8\", xml_declaration=True)\n logging.info(\"Finished\")",
"def convert(tree,fileName=None):\n rootNode = tree.getroot()\n if rootNode.tag not in ['Simulation', 'OutStreamManager', 'Steps']:\n ## This is not a valid input file, or at least not one we care about for\n ## this conversion\n return tree\n osmNode = None\n stepsNode = None\n if rootNode.tag == 'Simulation':\n osmNode = rootNode.find('OutStreamManager')\n stepsNode = rootNode.find('Steps')\n elif rootNode.tag == 'outstreamManager':\n ## Case for when the OutStreamManager node is specified in an external file.\n ## (Steps should not be in this file?)\n osmNode = rootNode\n elif rootNode.tag == 'Steps':\n ## Case for when the Steps node is specified in an external file.\n ## (OutStreamManager should not be in this file?)\n stepsNode = rootNode\n\n if osmNode is not None:\n osmNode.tag = 'OutStreams'\n\n if stepsNode is not None:\n for outputNode in stepsNode.iter('Output'):\n if 'class' in outputNode.attrib and outputNode.attrib['class'] == 'OutStreamManager':\n outputNode.attrib['class'] = 'OutStreams'\n\n return tree",
"def write_to_kml(gps_df, output_path):\n coordinates = []\n for index, row in gps_df.iterrows():\n lat = (1 if row['Lat_dir'] == 'N' else -1) * (float(row['Lat'][0:2]) + (float(row['Lat'][2:]) / 60))\n long = (1 if row['Long_dir'] == 'E' else -1) * (float(row['Long'][0:3]) + (float(row['Long'][3:]) / 60))\n speed = row['Speed']\n coordinates.append((long, lat, speed))\n\n kml_file = kml.newlinestring(name='line', coords=coordinates)\n kml_file.linestyle.color = simplekml.Color.cyan\n kml_file.linestyle.width = 3\n kml_file.polystyle.color = simplekml.Color.cyan\n kml_file.altitudemode = simplekml.AltitudeMode.relativetoground\n kml_file.extrude = 1\n\n # stores all coordinates into the output file\n with open(output_path, \"w+\"):\n kml.save(output_path, format=True)",
"def parse_CRAFT(kb_data):\n\n print(\"Parsing CRAFT corpus...\")\n corpus_dir = str()\n \n if kb_data.kb == \"chebi\":\n corpus_dir = \"./retrieved_data/corpora/CRAFT-4.0.1/concept-annotation/CHEBI/CHEBI/knowtator/\"\n \n elif kb_data.kb == \"go_bp\":\n corpus_dir = \"./retrieved_data/corpora/CRAFT-4.0.1/concept-annotation/GO_BP/GO_BP/knowtator/\"\n\n output_CRAFT = dict()\n \n for document in os.listdir(corpus_dir): \n root = ET.parse(corpus_dir + document)\n file_id = document.strip('.txt.knowtator.xml')\n annotations = dict()\n\n for annotation in root.iter(\"annotation\"):\n annotation_id = annotation.find('mention').attrib['id']\n annotation_text = annotation.find('spannedText').text\n start_pos, end_pos = annotation.find('span').attrib['start'], annotation.find('span').attrib['end']\n annotations[annotation_id] = [annotation_text, start_pos, end_pos] \n \n for classMention in root.iter(\"classMention\"):\n classMention_id = classMention.attrib['id']\n annotation_values = annotations[classMention_id]\n kb_id = classMention.find('mentionClass').attrib['id']\n \n if kb_id in kb_data.child_to_parent.keys(): # Consider only KB concepts with ONE direct ancestor\n direct_ancestor = kb_data.child_to_parent[kb_id]\n annotation = (annotation_values[0], annotation_values[1], \n annotation_values[2], kb_id, direct_ancestor) \n output_CRAFT = add_annotation_to_output_dict(file_id, annotation, output_CRAFT)\n \n print(\"...Done!\")\n return output_CRAFT",
"def package():\n \n hou.hipFile.save()\n currentHip = hou.expandString(hou.hipFile.name())\n\n # create a temp directory we are going to fill with crap\n tempFilePath = tempfile.mkdtemp()\n \n otls = os.path.join(tempFilePath, \"otls\")\n os.mkdir(otls)\n files = os.path.join(tempFilePath, \"files\")\n os.mkdir(files)\n \n # Get all the external references to the hipfile\n fileOnDisk = hou.fileReferences()\n\n # loop and do what comes natural.\n for _file in fileOnDisk:\n\n parm = _file[0]\n filepath = _file[1]\n \n # if its a otl we need to store it.\n if filepath.endswith(\".otl\"):\n \n shutil.copy(hou.expandString(filepath), otls)\n \n else:\n \n if not os.path.isfile(hou.expandString(filepath)): \n \n continue\n \n # create a directory in files and save 1 file to that location\n tmpFileName = os.path.basename(hou.expandString(filepath))\n tmpFileDir = os.path.basename(os.path.dirname(hou.expandString(filepath)))\n path = os.path.join(files, tmpFileDir)\n \n if not os.path.isdir(path):\n \n os.mkdir(path)\n\n shutil.copy(hou.expandString(filepath), os.path.join(path, os.path.basename(hou.expandString(filepath))))\n\n try:\n \n if not parm.node().isLocked():\n \n parm.set(os.path.join(path.replace(tempFilePath, \"$HIP\"), tmpFileName))\n \n except hou.PermissionError: \n \n logging.warning(\"Error hardening parm :\" + str(parm.name()) + \"on node \" +parm.node().path())\n\n hou.hipFile.save(os.path.join(tempFilePath, os.path.basename(hou.expandString(hou.hipFile.name()))))\n # Load the source hipfile\n hou.hipFile.load(currentHip)\n \n # create a zipfile and package everything. then copy it to the home.\n zipfileLoc = zipdir(tempFilePath)\n shutil.move(zipfileLoc, os.path.join(hou.expandString(\"~\"), \"package.zip\"))\n shutil.rmtree(tempFilePath)",
"def lkg_to_json(lkg, filename):\n\n nx_graph = lkg_to_nx(lkg)\n nx_to_json(nx_graph, filename)",
"def main(\n):\n music_home = \"/home/banana/music\"\n for inode in list_dir(music_home):\n if basename(inode) in [\n \"annotate\",\n \"metadata\",\n \"sped-up\",\n \"tracklists\",\n ] or isfile(inode):\n continue\n convert(inode)",
"def read_fn(file_references, mode, params=None):\r\n print('Reading the dataset from Datalakestore (2mm NIfTI images)....')\r\n\r\n def _augment(img):\r\n \"\"\"An image augmentation function\"\"\"\r\n return flip(img, axis=2)\r\n\r\n image_array = []\r\n label_array = []\r\n for f in file_references:\r\n subject_id = f[0]\r\n\r\n # Read the image nii with sitk\r\n ##t1_fn = os.path.join(data_path, '{}/T1_2mm.nii.gz'.format(subject_id))\r\n ##t1 = sitk.GetArrayFromImage(sitk.ReadImage(str(t1_fn)))\r\n t1_fn = os.path.join(data_path, '{}/T1_2mm.nii.gz'.format(subject_id))\r\n print(t1_fn)\r\n #with adlsFileSystemClient.open(t1_fn, 'rb') as f:\r\n # img = sitk.ReadImage(str(f))\r\n # sitk::ERROR: The file \"<ADL file: /clusters/DLTK_IXI_Dataset/2mm/IXI012/T1_2mm.nii.gz>\" does not exist.\r\n # sitk seems only read from local path....how to read from remote path????????\r\n # for short term download to local path\r\n # rpath is datalakestore, lpath is local file path both have the same root structure '/clusters/DLTK_IXI_Dataset/'\r\n multithread.ADLDownloader(adlsFileSystemClient, rpath=t1_fn, lpath=t1_fn, nthreads=5, chunksize=2**24, overwrite=True)\r\n img = sitk.ReadImage(str(t1_fn))\r\n # you need http://imagej.net/Fiji#Downloads app to show the img. More discussion and instruction: https://stackoverflow.com/questions/45682319/simpleitk-show-generates-error-in-imagej-on-linux\r\n ##sitk.Show(img)\r\n t1 = sitk.GetArrayFromImage(img)\r\n\r\n # Normalise volume image\r\n t1 = whitening(t1)\r\n images = np.expand_dims(t1, axis=-1).astype(np.float32)\r\n\r\n if mode == tf.estimator.ModeKeys.PREDICT:\r\n yield {'features': {'x': images}, 'img_id': subject_id}\r\n print('read_fn Predict')\r\n\r\n # Parse the sex classes from the file_references [1,2] and shift them\r\n # to [0,1]\r\n sex = np.int(f[1]) - 1\r\n y = np.expand_dims(sex, axis=-1).astype(np.int32)\r\n\r\n # Augment if used in training mode\r\n if mode == tf.estimator.ModeKeys.TRAIN:\r\n images = _augment(images)\r\n print('read_fn Train')\r\n # Check if the reader is supposed to return training examples or full images\r\n if params['extract_examples']:\r\n #print('read_fn params extract_examples')\r\n images = extract_random_example_array(\r\n image_list=images,\r\n example_size=params['example_size'],\r\n n_examples=params['n_examples'])\r\n for e in range(params['n_examples']):\r\n #print ('e: ', e)\r\n## yield {'features': {'x': images[e].astype(np.float32)},\r\n## 'labels': {'y': y.astype(np.float32)},\r\n## 'img_id': subject_id}\r\n image_array.append(images[e].astype(np.float32))\r\n label_array.append(y.astype(np.int32))\r\n else:\r\n print('read_fn params yield last')\r\n## yield {'features': {'x': images},\r\n## 'labels': {'y': y.astype(np.float32)},\r\n## 'img_id': subject_id}\r\n image_array.append(images)\r\n label_array.append(y.astype(np.int32))\r\n\r\n print(\"read_fn yield output_array with image shape = \", images.shape, \"label shape = \", y.shape)\r\n yield {'x': np.array(image_array), 'y': np.array(label_array)}",
"def packet_to_kml(packet, reader):\n\n try:\n src_ip = packet[IP].src\n src_kml = ip_to_kml(src_ip, reader)\n except:\n src_kml = None\n try:\n dest_ip = packet[IP].dest\n dest_kml = ip_to_kml(dest_ip, reader)\n except:\n dest_kml = None\n\n if src_kml is not None and dest_kml is not None:\n connect_kml = ips_to_line_kml(src_ip, dest_ip, reader)\n print(\"Added connection\")\n else:\n connect_kml = None\n\n return src_kml, dest_kml, connect_kml",
"def _gtTSmap(self):\n if os.path.isfile(self.outtsmap):\n # Already exists\n return\n\n if self.csys == 'GAL':\n center_icrs = SkyCoord(ra=self.ra*u.degree, dec=self.dec*u.degree, frame='icrs')\n self.ra = center_icrs.galactic.l.deg\n self.dec = center_icrs.galactic.b.deg\n\n model = os.path.join(self.workpath, 'TSmapModel.xml') \n rfil = open(self.outmodel, 'r')\n wfil = open(model, 'w')\n isSrc = False\n isDif = False\n for line in rfil:\n if (isSrc) and ('<source name' in line):\n # Arrived to a new source, restart copying\n isSrc = False\n if (isDif) and ('<source name' in line) and ('PointSource' in line):\n isDif = False\n if 'TARGET' in line:\n isSrc = True\n if ('<source name=\"gll_iem_v06\"' in line) or ('<source name=\"iso_source_v06\"' in line): \n isDif = True\n \n if isSrc:\n # Do not copy the Target model to make it appear in the TS map\n pass\n else:\n if isDif:\n # Leave Diffuse model normalizations free\n wfil.write(line)\n else:\n # Make sur the gtlike output source model has all source parameters fixed\n wfil.write(line.replace('free=\"1\"', 'free=\"0\"'))\n rfil.close()\n wfil.close()\n\n # Launch the gttsmap tool \n if self.mode == 'binned':\n os.popen(\"gttsmap evfile={} scfile={} bexpmap={} expcube={} cmap={} srcmdl={}\\\n outfile={} evtype={} irfs=CALDB optimizer=NewMinuit statistic=BINNED ftol=1e-2\\\n coordsys={} proj=AIT nxpix={} nypix={} binsz={} xref={} yref={}\".format(self.outmktime,\n self.ft2, self.outbinexp, self.outltcube, self.outbincub, model, self.outtsmap, self.evtype,\n self.csys, self.imwid, self.imwid, self.binsz, self.ra, self.dec))\n elif self.mode == 'unbinned':\n os.popen(\"gttsmap evfile={} scfile={} expmap={} expcube={} srcmdl={}\\\n outfile={} evtype={} irfs=CALDB optimizer=NewMinuit statistic=UNBINNED ftol=1e-2\\\n coordsys={} proj=AIT nxpix={} nypix={} binsz={} xref={} yref={}\".format(self.outmktime,\n self.ft2, self.outexpmap, self.outltcube, model, self.outtsmap, self.evtype,\n self.csys, self.imwid, self.imwid, self.binsz, self.ra, self.dec))\n else:\n return\n\n if self.csys == 'GAL':\n self.ra = center_icrs.ra.deg\n self.dec = center_icrs.dec.deg\n return",
"def normal_structure(feedback_folder_path):\n elems = os.listdir(feedback_folder_path)\n global mos_sim\n global mos_nat\n # ignore instruction text files\n for junk in [\"Anleitung.txt\", \"instructions.txt\", \".DS_Store\"]:\n if junk in elems: elems.remove(junk)\n # iterate score text files and update MOS dictionaries\n for elem in elems:\n for file in os.listdir(os.path.join(feedback_folder_path, elem)):\n if file.endswith('.txt'):\n filepath = os.path.join(feedback_folder_path, elem, file)\n code, nat_score, sim_score = score_filepath_to_scores(filepath)\n update_dicts(code, nat_score, sim_score)",
"def get_kml_document(kml_obj: fastkml.kml.KML) -> fastkml.Document:\n\t\n\treturn next(kml_obj.features())",
"def convert_nkjp(nkjp_path, output_dir):\n # Load XML NKJP\n print(\"Reading data from %s\" % nkjp_path)\n if os.path.isfile(nkjp_path) and (nkjp_path.endswith(\".tar.gz\") or nkjp_path.endswith(\".tgz\")):\n with tempfile.TemporaryDirectory() as nkjp_dir:\n print(\"Temporarily extracting %s to %s\" % (nkjp_path, nkjp_dir))\n with tarfile.open(nkjp_path, \"r:gz\") as tar_in:\n tar_in.extractall(nkjp_dir)\n\n subfolder_to_entities = load_xml_nkjp(nkjp_dir)\n elif os.path.isdir(nkjp_path):\n subfolder_to_entities = load_xml_nkjp(nkjp_path)\n else:\n raise FileNotFoundError(\"Cannot find either unpacked dataset or gzipped file\")\n converted = []\n for subfolder_name, pars in subfolder_to_entities.items():\n for par_id, par in pars.items():\n paragraph_identifier = f\"{subfolder_name}|{par_id}\"\n par_tokens = []\n for _, sent in par.items():\n tokens = sent.values()\n srt = sorted(tokens, key=lambda tok:tok[\"i\"])\n for token in srt:\n _ = token.pop(\"i\")\n _ = token.pop(\"seg_id\")\n par_tokens.append(token)\n par_tokens[0][\"paragraph_id\"] = paragraph_identifier\n converted.append(par_tokens)\n\n split = split_dataset(converted)\n\n for split_name, split in split.items():\n if split:\n with open(os.path.join(output_dir, f\"pl_nkjp.{split_name}.json\"), \"w\", encoding=\"utf-8\") as f:\n json.dump(split, f, ensure_ascii=False, indent=2)",
"def open_output_files(self):\n if not os.path.exists(self.outputDictionaryPath):\n os.makedirs(self.outputDictionaryPath)\n\n self.XMLfile = open(os.path.join(self.outputDictionaryPath, 'MyDictionary.xml'), 'w+', encoding='utf-8') # this is the output file\n self.Makefile = open(os.path.join(self.outputDictionaryPath, 'Makefile'), 'w+', encoding='utf-8')\n self.MyInfoFile = open(os.path.join(self.outputDictionaryPath, 'MyInfo.plist'), 'w+', encoding='utf-8')",
"def txt_to_xml(ws2ify_path, stage_dir):\r\n\r\n txt_file = get_file(stage_dir, \".txt\", override=True)\r\n obj_file = get_file(stage_dir, \".obj\", override=True)\r\n keyframe_easing_dict = {\"1\": \"LINEAR\", \"2\": \"EASED\"}\r\n\r\n # If both an obj and txt file exist, user can use ws2ify\r\n if txt_file and obj_file:\r\n\r\n while True:\r\n use_ws2ify = input(\"\\nTXT exists. Use ws2ify? (Y/N) \")\r\n if use_ws2ify:\r\n use_ws2ify = use_ws2ify.upper()[0]\r\n\r\n if use_ws2ify != \"Y\" and use_ws2ify != \"N\":\r\n print(\"\\nInvalid input.\")\r\n elif use_ws2ify == \"N\":\r\n return None\r\n else:\r\n break\r\n\r\n txt_file = get_file(stage_dir, \".txt\", override=False)\r\n obj_file = get_file(stage_dir, \".obj\", override=False)\r\n txt_path = os.path.join(stage_dir, txt_file)\r\n obj_path = os.path.join(stage_dir, obj_file)\r\n\r\n while True:\r\n keyframe_easing = input(\"\\nKeyframe easing = linear(1) or eased(2)?: \")\r\n if keyframe_easing != \"1\" and keyframe_easing != \"2\":\r\n print(\"\\nInvalid input.\")\r\n else:\r\n keyframe_easing = keyframe_easing_dict[keyframe_easing]\r\n break\r\n\r\n xml = \"{}.xml\".format(input(\"\\nOutput xml filename: \"))\r\n xml_path = os.path.join(stage_dir, xml)\r\n\r\n ws2ify_path = os.path.expanduser(ws2ify_path)\r\n os.chdir(ws2ify_path)\r\n subprocess.call([\"python\", \"run.py\", txt_path, obj_path, xml_path, keyframe_easing])\r\n os.chdir(config_writer.tool_path)\r\n\r\n return xml",
"def poly2kml(xy,fname=None,name='poly',color='00FF00', width=3,\n verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n x,y = xy\n\n if verbose:\n print(\"Polygon: %10.6f %10.6f\" % (x[0],y[0]))\n for j in range(1,len(x)):\n print(\" %10.6f %10.6f\" % (x[j],y[j]))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x'] = x\n mapping['y'] = y\n mapping['elev'] = elev\n mapping['name'] = name\n d = \" x[0] = %s, y[0] = %s\\n\" % (x[0],y[0]) \n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n mapping['desc'] = d\n mapping['color'] = color\n mapping['width'] = width\n\n v = \"\\n\"\n for j in range(len(x)):\n v = v + \"%s,%s,%s\\n\" % (f2s(x[j]),f2s(y[j]),f2s(elev))\n v = v + \"%s,%s,%s\\n\" % (f2s(x[0]),f2s(y[0]),f2s(elev))\n v.replace(' ','')\n \n region_text = kml_region(mapping, v)\n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def writeInput_for_LAMMPS(rd, listAtoms, filename):\n #f=open(\"geo.kirigami_d0.0_\"+str(rd),\"w+\")\n f=open(filename+str(rd),\"w+\")\n f.write(\"\\n\")\n f.write(\"%d atoms\\n\" %len(listAtoms))\n f.write(\"1 atom types\\n\")\n f.write(\"\\n\")\n f.write(\"%f\\t%f xlo xhi\\n\" %(xlo-1, xhi+1))\n f.write(\"%f\\t%f ylo yhi\\n\" %(ylo-1, yhi+1))\n f.write(\"%f\\t%f zlo zhi\\n\" %(zlo-1, zhi+1))\n f.write(\"\\n\")\n f.write(\"Atoms\\n\")\n f.write(\"\\n\")\n for i in range (len(listAtoms)):\n f.write(\"%d\\t1\\t%f\\t%f\\t%f\\n\" %(i+1, listAtoms[i][0], listAtoms[i][1], listAtoms[i][2]))\n f.close()",
"def load_graph(net_file):\n path, filename = os.path.split(net_file)\n net_name = os.path.splitext(filename)[0]\n # get full path\n path = os.path.abspath(path)\n pickle_dir = path + os.sep + \"cache\"\n if not os.path.isdir(pickle_dir):\n os.mkdir(pickle_dir)\n pickle_file = \"{0}/{1}.pickle\".format(pickle_dir, net_name)\n if (os.path.isfile(pickle_file) and\n os.stat(net_file).st_mtime < os.stat(pickle_file).st_mtime):\n # Pickle file exists, and source_file is older\n graph = nx.read_gpickle(pickle_file)\n else:\n # No pickle file, or is outdated\n graph = nx.read_gml(net_file)\n nx.write_gpickle(graph, pickle_file)\n # ANK only understands GML files cleaned by topzootools\n if 'Creator' in graph.graph:\n if graph.graph['Creator'] == \"Topology Zoo Toolset\":\n # Graph has been processed by topzootools into suitable \n # format for ank\n return graph\n elif graph.graph['Creator'] == ' \"yFiles\"':\n # Note yFiles has quotes and leading space after nx parsing\n #TODO: try and use topzootools module (if installed)\n # to do conversion\n # to a /tmp file\n LOG.warn(\"Using GML file exported from yED, \"\n \"Please use TopZooTools to convert yED GML file\"\n \" into Topology Zoo format for use in AutoNetkit\")\n #TODO: make this throw exception so that program exits\n return None\n else:\n #Unknown file creator, may be user manually created, but warn\n LOG.warn(\"Unknown GML file creator\")\n return graph\n else:\n # No creator specified\n return graph",
"def prepare_ozi(mbbox, mwidth, mheight, name, transform):\n def deg(value, is_lon):\n degrees = math.floor(abs(value))\n minutes = (abs(value) - degrees) * 60\n return '{:4d},{:3.5F},{}'.format(\n int(round(degrees)), minutes,\n ('W' if is_lon else 'S') if value < 0 else ('E' if is_lon else 'N'))\n\n ozipoint = ('Point{:02d},xy, , ,in, deg, , ,N, , ,E' +\n ', grid, , , ,N')\n bbox = transform.backward(mbbox)\n points = \"\\n\".join([ozipoint.format(n) for n in range(3, 31)])\n header = '''OziExplorer Map Data File Version 2.2\nNik4\n{name}\n1 ,Map Code,\nWGS 84,WGS 84, 0.0000, 0.0000,WGS 84\nReserved 1\nReserved 2\nMagnetic Variation,,,E\nMap Projection,Mercator,PolyCal,No,AutoCalOnly,No,BSBUseWPX,No\nPoint01,xy, 0, 0,in, deg,{top},{left}, grid, , , ,N\nPoint02,xy, {width:4d}, {height:4d},in, deg,{bottom},{right}, grid, , , ,N\n{points}\nProjection Setup,,,,,,,,,,\nMap Feature = MF ; Map Comment = MC These follow if they exist\nTrack File = TF These follow if they exist\nMoving Map Parameters = MM? These follow if they exist\nMM0,Yes\nMMPNUM,4\nMMPXY,1,0,0\n'''.format(name=name,\n top=deg(bbox.maxy, False),\n left=deg(bbox.minx, True),\n width=mwidth - 1,\n height=mheight - 1,\n bottom=deg(bbox.miny, False),\n right=deg(bbox.maxx, True),\n points=points)\n return ''.join([\n header,\n \"MMPXY,2,{},0\\n\".format(mwidth),\n \"MMPXY,3,{},{}\\n\".format(mwidth, mheight),\n \"MMPXY,4,0,{}\\n\".format(mheight),\n 'MMPLL,1,{:4.6f},{:4.6f}\\n'.format(bbox.minx, bbox.maxy),\n 'MMPLL,2,{:4.6f},{:4.6f}\\n'.format(bbox.maxx, bbox.maxy),\n 'MMPLL,3,{:4.6f},{:4.6f}\\n'.format(bbox.maxx, bbox.miny),\n 'MMPLL,4,{:4.6f},{:4.6f}\\n'.format(bbox.minx, bbox.miny),\n \"MM1B,{}\\n\".format((mbbox.maxx - mbbox.minx) / mwidth * math.cos(\n math.radians(bbox.center().y))),\n \"MOP,Map Open Position,0,0\\n\",\n \"IWH,Map Image Width/Height,{},{}\\n\".format(mwidth, mheight),\n ])",
"def convertIcdar2013Localization(dataDir, outPrefix, objectives, imgExt='jpg',\n gtPrefix='gt_', gtExt='txt'):\n\n imgFileList = [ff for ff in os.listdir(dataDir)\n if re.search('.'+imgExt+'$', ff)]\n gtFileList = [ff for ff in os.listdir(dataDir)\n if re.search('^'+gtPrefix+'\\w*.'+gtExt+'$', ff)]\n wordList = getIcdar2013WordList(dataDir, gtFileList)\n\n lenList, charMat = wordsToChars(wordList)\n outFilenames = makeLabelFiles(objectives, dataDir, imgFileList, lenList,\n charMat, outPrefix)\n return outFilenames",
"def read_flat_map(filename,i_map=0) :\n hdul=fits.open(filename)\n w=WCS(hdul[0].header)\n\n maps=hdul[i_map].data\n ny,nx=maps.shape\n\n return w,maps",
"def read_layout(outFile=None, linked=False, append=False):\n from cgl.plugins.blender.lumbermill import scene_object, LumberObject, import_file\n from cgl.core.utils.read_write import load_json\n import bpy\n\n if outFile == None:\n outFileObject = scene_object().copy(ext='json', task='lay', user='publish').latest_version()\n outFileObject.set_attr(filename='%s_%s_%s.%s' % (outFileObject.seq,\n outFileObject.shot,\n outFileObject.task,\n 'json'\n ))\n outFile = outFileObject.path_root\n # outFile = scene_object().path_root.replace(scene_object().ext, 'json')\n\n\n\n data = load_json(outFile)\n\n for p in data:\n print(p)\n data_path = data[p]['source_path']\n blender_transform = data[p]['blender_transform']\n\n transform_data = []\n for value in blender_transform:\n transform_data.append(value)\n\n print(transform_data)\n\n pathToFile = os.path.join(scene_object().root, data_path)\n lumberObject = LumberObject(pathToFile)\n\n\n\n if lumberObject.filename in bpy.data.libraries:\n lib = bpy.data.libraries[lumberObject.filename]\n bpy.data.batch_remove(ids=([lib]))\n import_file(lumberObject.path_root, linked=linked, append=append)\n else:\n import_file(lumberObject.path_root, linked=linked, append=append)\n\n if p not in bpy.context.collection.objects:\n obj = bpy.data.objects.new(p, None)\n bpy.context.collection.objects.link(obj)\n obj.instance_type = 'COLLECTION'\n obj.instance_collection = bpy.data.collections[lumberObject.asset]\n obj.location = (transform_data[0], transform_data[1], transform_data[2])\n obj.rotation_euler = (transform_data[3], transform_data[4], transform_data[5])\n obj.scale = (transform_data[6], transform_data[7], transform_data[8])\n\n bpy.ops.file.make_paths_relative()",
"def ks2_to_alf(ks_path, out_path, sr=30000, nchannels=385, label=None, force=True):\n m = model.TemplateModel(dir_path=ks_path,\n dat_path=[],\n sample_rate=sr,\n n_channels_dat=nchannels)\n ac = alf.EphysAlfCreator(m)\n ac.convert(out_path, label=label, force=force)",
"def get_kml_dict(self,name,filename):\n\n lon1,lon2,lat1,lat2=self.get_bounds()\n d={'lat1':lat1,'lat2':lat2,'lon1':lon1,'lon2':lon2, \\\n 'name':name,'filename':filename,'time':self.get_time()}\n return d",
"def _convert(self):\n\n json_data = xjson.loads(self.jfile_path)\n\n with io.open(self.yfile_path, 'w', encoding='utf8') as f:\n yaml.dump(json_data, f, default_flow_style=False, allow_unicode=True)",
"def fileOpen(self):\r\n if(self.dataController.status == self.dataController.playing):\r\n self.showMidsagittalView()\r\n self.dataController.stop()\r\n dir = os.path.dirname(unicode(self.kinfilename)) \\\r\n if self.kinfilename is not None else \".\"\r\n self.kinfilename = QtCore.QString(QtGui.QFileDialog.getOpenFileName(self,\r\n \"Visualization Tool - Choose Kinematic File\", dir,\r\n \"TSV files (*.tsv)\"))\r\n if(self.kinfilename == QtCore.QString()):\r\n return\r\n newkinfilename = copy.deepcopy(self.kinfilename)\r\n kinfileEnd = QtCore.QRegExp(\"_BPC.tsv\")\r\n self.audiofilename = newkinfilename.replace(kinfileEnd,'.wav')\r\n self.audiofilename = self.findAudioFile(unicode(self.kinfilename))\r\n if self.audiofilename is None:\r\n QtGui.QMessageBox.warning(self,'Cannot Find Audio File',\r\n \"The corresponding audio file (*.wav) could not be found.\"\r\n \"<p>Please select the corresponding file.\",\r\n QtGui.QMessageBox.Ok, QtGui.QMessageBox.NoButton)\r\n self.audiofilename = QtCore.QString(QtGui.QFileDialog.getOpenFileName(self,\r\n \"Visualization Tool - Choose Audio File\", dir,\r\n \"WAV files (*.wav)\"))\r\n if (self.audiofilename):\r\n self.dataController.onFileLoaded(unicode(self.kinfilename),unicode(self.audiofilename))\r\n self.updateStatus(\"File %s loaded\" % unicode(self.kinfilename))\r\n self.showMidsagittalView()\r\n self.showTrajectory = False\r\n self.imageSavingDir = None\r\n self.textSavingDir = None\r\n# self.dataController.stop()\r\n for action, check in self.resetableActions:\r\n action.setChecked(check)\r\n else:\r\n return",
"def kaldi2dag(self, file_path):\n raise NotImplementedError",
"def convert(\n kml_path_or_buffer: str | pl.Path | TextIO | BinaryIO,\n feature_collection_name: Optional[str] = None,\n style_type: Optional[str] = None,\n *,\n separate_folders: bool = False,\n):\n # Read KML\n if isinstance(kml_path_or_buffer, (str, pl.Path)):\n kml_path_or_buffer = pl.Path(kml_path_or_buffer).resolve()\n with kml_path_or_buffer.open(encoding=\"utf-8\", errors=\"ignore\") as src:\n kml_str = src.read()\n else:\n kml_str = kml_path_or_buffer.read()\n kml_path_or_buffer.close()\n\n # Parse KML\n root = md.parseString(kml_str)\n\n # Build GeoJSON layers\n if separate_folders:\n result = build_layers(root)\n else:\n result = [build_feature_collection(root, name=feature_collection_name)]\n\n if style_type is not None:\n # Build style dictionary\n if style_type not in STYLE_TYPES:\n raise ValueError(f\"style type must be one of {STYLE_TYPES}\")\n else:\n builder_name = f\"build_{style_type}_style\"\n style_dict = globals()[builder_name](root)\n result = style_dict, *result\n\n return result",
"def XML_EC_PL(Name, InputsFile, OutputFile, emin,emax):\n\n\t#On commence par afficher ce qu'on fait\r\n\tprint \" Build xml file \"\r\n\r\tprint InputsFile\n\t#ouverture du fichier dans lequel on place le source model\n\ttry:\n\t\tfresult = open(OutputFile, 'w')\n\texcept:\n\t\tprint \"Coucou\"\r\n \t#ecriture des premieres lignes invariantes\n\tfresult.write('<?xml version=\"1.0\" ?>')\r\n\tfresult.write(\"<source_library title=\\\"source library\\\">\\n\")\n\r\n \t#ouverture du fichier avec les entrees\r\n\tf = open(InputsFile,\"r\")\r\n\tlines = f.readlines()\r\n\t\r\n \t#Ajout des sources detectees dans le catalogue\n\t#Pour chaque ligne du fichier d'entree\r\n\tfor line in range(len(lines)):\n\t\t#Lire les donnees de la ligne\t\t\r\n\t\tdata = lines[line].split()\r\n\t\tname = data[0]\n\n\t\t#Verification : est on en train de traiter la source que l'on veut etudier ou une autre ?\r\n\t\tif str(name) == Name :\r\n\t\t\tmysource = 1\r\n\t\telse:\r\n\t\t\tmysource = 0\n\n\t\t#recuperation des donnees\r\n\t\tRA = data[1]\r\n\t\tDEC = data[2]\r\n\t\tIntegral = float(data[3])*float(Frac)\r\n\t\tGamma= data[4]\n\n\t\t\r\n\t\ttry:\n\t\t\t#essai de definition des donnees pour un PL avec ExpCut\n\t\t\tPrefactor = float(data[5])*float(Frac)\r\n\t\t\tEnergy = float(data[6])\r\n\t#\t\tPrefactor = Prefactor/pow(Energy/100., float(Gamma)) #Densite de flux calculee a Epivot\r\n\t#\t\tPrefactor = Prefactor*pow(1000./100., float(Gamma)) #We do the calculation with (E/1000.)^Gamma\n\t\t\tvariabilite=float(data[8])\n\n#\t\t\tprint variabilite\n\n\n\n\r\n\t\t\tcut = float(data[7]) # Cut est la variable qui nous permettra de savoir si il faut utiliser un cut off (1) ou une loi de puissance normale (2)\r\n\t\texcept:\r\n\t\t\ttry:\r\n\t\t\t\tcut = float(data[5])\r\n\t\t\texcept:\r\n\t\t\t\tprint \" Wrong size of list \"\r\n\t\t\t\tsys.exit()\r\n \t#Si on considere un ccut off exponentiel pour la source :\r\n\t\tif cut == 1:\n\t\t\t#ecriture du nom de la source consideree\r\n\t\t\tresult_line=\" <source \"\r\n\t\t\tresult_line += \"name=\\\"\"+name+\"\\\"\"\r\n\t\t\tresult_line += \" type=\\\"PointSource\\\">\\n\"\r\n\t\t\tspectrum_type = \"PLSuperExpCutoff\"\n\t\t\t#Utilisation de la modelisation PLSuperExpCutoff car plus simple et plus intuitive pour nous et pour la modelisation des pulsars si il faut en modeliser\n\r\n\t\t\t#definition des parametres spectraux a prendre en comtpe et de la chaine de caractere a integrer\r\n\n\n\n\t\t\tif variabilite==0.0 or variabilite==2.0:\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"0\\\" max=\\\"10000000.0\\\" min=\\\"0.0000001\\\"\"\n\n\t\t\t\t#d'ou vient ce 1e-12\r\n\t\t\t\tIntegral = float(Prefactor)*1.0e10\r\n\t\t\t\tscale = 1.0e-10\n\r\n\t\t\t\tspectrum_lines += \" name=\\\"Prefactor\\\" scale=\\\"\"+str(scale)+\"\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Integral)+\"\\\" />\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index1\\\" scale=\\\"-1.0\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Gamma)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"20000.0\\\" min=\\\"1.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Scale\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Energy)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"100.0\\\" min=\\\"0.001\\\"\"\n\t\t\t\tspectrum_lines += \" name=\\\"Cutoff\\\" scale=\\\"1000.0\\\" value=\\\"30.0\\\"/>\\n\"\n\r\n\t\t\t\tspectrum_lines += \" 
<parameter free=\\\"0\\\" max=\\\"5.0\\\" min=\\\"0.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index2\\\" scale=\\\"1.0\\\" value=\\\"1.0\\\"/>\\n\"\n\t\t\telif variabilite==1.0 :\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"10000000.0\\\" min=\\\"0.0\\\"\"\n\n\t\t\t\t#d'ou vient ce 1e-12\r\n\t\t\t\tIntegral = float(Prefactor)*1.0e10\r\n\t\t\t\tscale = 1.0e-10\n\n\t\t\t\tspectrum_lines += \" name=\\\"Prefactor\\\" scale=\\\"\"+str(scale)+\"\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Integral)+\"\\\" />\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index1\\\" scale=\\\"-1.0\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Gamma)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"20000.0\\\" min=\\\"1.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Scale\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Energy)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"100.0\\\" min=\\\"0.0001\\\"\"\r\t\t\t\tspectrum_lines += \" name=\\\"Cutoff\\\" scale=\\\"1000.0\\\" value=\\\"30.0\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"5.0\\\" min=\\\"0.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index2\\\" scale=\\\"1.0\\\" value=\\\"1.0\\\"/>\\n\"\n\n\r\n \r\n\n# <spectrum type=\"PLSuperExpCutoff\">\n# <parameter free=\"1\" max=\"100000\" min=\"0\" name=\"Prefactor\" scale=\"1e-10\" value=\"Prefactor*1e-10\"/>\n# <parameter free=\"1\" max=\"0\" min=\"5\" name=\"Index1\" scale=\"-1\" value=\"valeur du catalogue\"/>\n# <parameter free=\"0\" max=\"20000\" min=\"1.0\" name=\"Scale\" scale=\"1\" value=\"Epivot\"/>\n# <parameter free=\"1\" max=\"300000\" min=\"100\" name=\"Cutoff\" scale=\"1\" value=\"3000\"/>\n# <parameter free=\"0\" max=\"5\" min=\"0\" name=\"Index2\" scale=\"1\" value=\"1.5\"/>\n# </spectrum>\n\n\r\n\t\telse:\n\t\t#Sinon (si on considere une loi de puissance simple)\n\t\t#definition de la chaine de caractere comportant le nom de la source\r\n\t\t\tresult_line=\" <source \"\r\n\t\t\tresult_line += \"name=\\\"\"+name+\"\\\"\"\n\t\t\tif mysource == 0:\r\t\t\t\tresult_line += \" type=\\\"PointSource\\\">\\n\"\n\t\t\telse:\n\t\t\t\tresult_line += \" type=\\\"PointSource\\\">\\n\"\t\t\t\t\n\n\t\t\t#definition de la chaine de caractere correspondant a la forme de fit que l'on souhaite utiliser (Loi de puissance)\r\n\t\t\tspectrum_type = \"PowerLaw2\"\r\n\r\n\t\t\tif mysource == 0 and variabilite!=1.0:\n\t\t\t#si ce n'est pas la source que l'on etudie on fige le parametre Integrale\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"0\\\" max=\\\"1000000.0\\\" min=\\\"0.0\\\"\"\r\n\t\t\telse:\n\t\t\t#sinon on le libere\r\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"1000000.0\\\" min=\\\"0.0\\\"\"\n\n\n\n\n\n\t\t\t#Toujours ce facteur....\r\n\t\t\tIntegral = float(Integral)*1e10\r\n\t\t\tscale = 1e-10\n\n\n\t\n\r\n\t\t\tspectrum_lines += \" name=\\\"Integral\\\" scale=\\\"\"+str(scale)+\"\\\" value=\\\"\"\r\n\t\t\tspectrum_lines += str(Integral)+\"\\\" />\\n\"\n\r\n\t\t\tif mysource == 0 and variabilite!=1.0:\n\t\t\t\t#si ce n'est pas la source que l'on etudie on fige le parametre gamma\r\n\t\t \t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\r\n\t\t\telse:\n\t\t\t\t#si c'est pas la source que l'on etudie on le laisse libre\r\n\t\t \t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\n\n\t\t\t#fin 
de la chaine de parametres sur le modele spectral\r\n\t\t\tspectrum_lines += \" name=\\\"Index\\\" scale=\\\"-1.0\\\" value=\\\"\"\r\n\t\t\tspectrum_lines += str(Gamma)+\"\\\"/>\\n\"\r\n \r\n\t\t\tif mysource == 0 and variabilite!=1.0:\n\t \n\t\t\t spectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"200000.0\\\" min=\\\"20.0\\\"\"\r\n\t\t\t spectrum_lines += \" name=\\\"LowerLimit\\\" scale=\\\"1.0\\\" value=\\\"1000.0\\\"/>\\n\"\r\n \r\n\t\t\t spectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"1000000.0\\\" min=\\\"20.0\\\"\"\r\n\t\t\t spectrum_lines += \" name=\\\"UpperLimit\\\" scale=\\\"1.0\\\" value=\\\"100000.0\\\"/>\\n\"\n\t\t\telse:\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"200000.0\\\" min=\\\"20.0\\\"\"\n\t\t\t\tspectrum_lines += \" name=\\\"LowerLimit\\\" scale=\\\"1.0\\\" value=\\\"100\\\"/>\\n\"\n\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"100000.0\\\" Min =\\\"20.0\\\"\"\n\t\t\t\tspectrum_lines += \" name=\\\"UpperLimit\\\" scale=\\\"1.0\\\" value=\\\"100000.0\\\"/>\\n\"\n\n \t\t#ajout du modele spectral a la liste de parametres \r\n\t\tresult_line += \" <spectrum type=\\\"\"+spectrum_type+\"\\\">\\n\"\r\t\tresult_line += spectrum_lines\r\n\t\tresult_line += \" </spectrum>\\n\"\n\n\t\t\n\n\t\tif mysource==0 and variabilite!=1.0:\n \t\t\t#ajout du modele spatial a la liste de parametres \r\n\t\t\tresult_line += \" <spatialModel type=\\\"SkyDirFunction\\\">\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"360\\\" min=\\\"-360\\\"\"\r\n\t\t\tresult_line += \" name=\\\"RA\\\" scale=\\\"1\\\" value=\\\"\"+RA+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"90\\\" min=\\\"-90\\\"\"\r\n\t\t\tresult_line += \" name=\\\"DEC\\\" scale=\\\"1\\\" value=\\\"\"+DEC+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" </spatialModel>\\n\"\n\t\telif mysource==0 and variabilite==1.0:\n \t\t\t#ajout du modele spatial a la liste de parametres \r\n\t\t\tresult_line += \" <spatialModel type=\\\"SkyDirFunction\\\">\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"360\\\" min=\\\"-360\\\"\"\r\n\t\t\tresult_line += \" name=\\\"RA\\\" scale=\\\"1\\\" value=\\\"\"+RA+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"90\\\" min=\\\"-90\\\"\"\r\n\t\t\tresult_line += \" name=\\\"DEC\\\" scale=\\\"1\\\" value=\\\"\"+DEC+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" </spatialModel>\\n\"\n\t\telse:\n #ajout du modele spatial a la liste de parametres \n\t\t\tresult_line += \" <spatialModel type=\\\"SkyDirFunction\\\">\\n\"\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"360\\\" min=\\\"-360\\\"\"\n\t\t\tresult_line += \" name=\\\"RA\\\" scale=\\\"1\\\" value=\\\"\"+RA+\"\\\"/>\\n\"\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"90\\\" min=\\\"-90\\\"\"\n\t\t\tresult_line += \" name=\\\"DEC\\\" scale=\\\"1\\\" value=\\\"\"+DEC+\"\\\"/>\\n\"\n\t\t\tresult_line += \" </spatialModel>\\n\"\n\t\t\t\n\t\tresult_line += \" </source>\\n\"\r\n\t\tfresult.write(result_line+\"\\n\")\r\n #Ajout du fond diffus galactique\n\tresult_line=\" <source \"\r\n\tresult_line += \"name=\\\"gal_v02\\\"\"\r\n\tresult_line += \" type=\\\"DiffuseSource\\\">\\n\"\r\n\tspectrum_type = \"ConstantValue\"\r\n\r\n\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"10.0\\\" min=\\\"0\\\"\"\r\n\tspectrum_lines += \" name=\\\"Value\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Frac)+\"\\\" />\\n\"\r\n\r\n\tresult_line += \" <spectrum type=\\\"\"+spectrum_type+\"\\\">\\n\"\r\n\tresult_line += 
spectrum_lines\r\n\tresult_line += \" </spectrum>\\n\"\r\n\r\n\tresult_line += \" <spatialModel file=\\\"/nfs/farm/g/glast/u31/marianne/VelaX/July09_Pointed/gll_iem_v02.fit\\\" type=\\\"MapCubeFunction\\\">\\n\"\r\n\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"1000.0\\\" min=\\\"0.0\\\"\"\r\n\tresult_line += \" name=\\\"Normalization\\\" scale=\\\"1\\\" value=\\\"1.0\\\"/>\\n\"\r\n\tresult_line += \" </spatialModel>\\n\"\r\n\tresult_line += \" </source>\\n\"\r\n\tfresult.write(result_line+\"\\n\")\r\n\r\n \t#Ajout du fond diffus extragalactique\r\n\tresult_line=\" <source \"\r\n\tresult_line += \"name=\\\"eg_v02\\\"\"\r\n\tresult_line += \" type=\\\"DiffuseSource\\\">\\n\"\r\n\tspectrum_type = \"FileFunction\"\r\n\r\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"10.0\\\" min=\\\"0\\\"\"\r\n\tspectrum_lines += \" name=\\\"Normalization\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Frac)+\"\\\" />\\n\"\r\n\r\n\tresult_line += \" <spectrum file=\\\"/nfs/farm/g/glast/u31/marianne/VelaX/July09_Pointed/isotropic_iem_v02.txt\\\" type=\\\"\"+spectrum_type+\"\\\">\\n\"\r\n\tresult_line += spectrum_lines\r\n\tresult_line += \" </spectrum>\\n\"\r\n \r\n\tresult_line += \" <spatialModel type=\\\"ConstantValue\\\">\\n\"\r\n\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"100.0\\\" min=\\\"0.0\\\"\"\r\n\tresult_line += \" name=\\\"Value\\\" scale=\\\"1\\\" value=\\\"1.0\\\"/>\\n\"\r\n\tresult_line += \" </spatialModel>\\n\"\r\n\tresult_line += \" </source>\\n\"\r\n\tfresult.write(result_line+\"\\n\")\r\n\n \t#Fermeture des fichiers \r\n\tf.close() \r\n\tfresult.write(\"\\n</source_library>\\n\")\r\n\tfresult.close()\r\n\treturn",
"def main():\n LESSONS_PATH = os.path.join(LESSON_LOCATOR_DATA, LESSON_SETS[0])\n ORIGINAL_LESSONS_PATH = os.path.join(LESSONS_PATH, \"original\")\n ANNOTATED_LESSONS_PATH = os.path.join(LESSONS_PATH, \"annotated\")\n\n if not os.path.exists(ANNOTATED_LESSONS_PATH):\n os.mkdir(ANNOTATED_LESSONS_PATH)\n\n print(\"Scanning original lessons in %s...\" % ORIGINAL_LESSONS_PATH)\n\n for item in os.listdir(ORIGINAL_LESSONS_PATH):\n if item == \".DS_Store\": continue\n\n print(\" found: %s\" % item)\n\n item_path = os.path.join(ORIGINAL_LESSONS_PATH, item)\n\n lesson_number = None\n lesson_description = None\n mobj = re.search(r'^AY\\s+(\\d+)\\s*-\\s*(.+)\\.txt$', item)\n if mobj:\n lesson_number = mobj.group(1)\n lesson_description = mobj.group(2)\n\n print(\" number: %s\" % lesson_number)\n print(\" description: %s\" % lesson_description)\n\n lesson = dict()\n lesson['number'] = lesson_number\n lesson['description'] = lesson_description\n\n fh = open(item_path)\n lesson_raw_text = fh.read()\n fh.close()\n lesson_text = re.split(r'\\n', lesson_raw_text)\n# lesson_raw_text_reencoded = lesson_raw_text.decode('mac-roman').encode('utf-8')\n# lesson_text = re.split(r'\\n', lesson_raw_text_reencoded)\n\n lesson['text'] = lesson_text\n lesson['parsed'] = parseLesson(lesson_text)\n\n if lesson['parsed']['end_of_lesson'] is None:\n print(\" lesson has no 'end of lesson' marker\")\n\n lesson_json = json.dumps(lesson, indent=4)\n annotated_lesson_path = os.path.join(ANNOTATED_LESSONS_PATH, \"ay_%04d.json\" % int(lesson_number))\n fh = open(annotated_lesson_path, \"w\")\n fh.write(lesson_json)\n fh.close()\n\n else:\n print(\"ERROR: File name not understood: %s\" % item)\n\n return 0",
"def process_A_2019_ADMET_DMPK():\n\n fname = f\"{DATA_PATH}/A.2019.ADMET_DMPK.csv\"\n fout_1 = f\"{PROCESSED_PATH}/A.2019.ADMET_DMPK.SSF.smi\"\n fout_2 = f\"{PROCESSED_PATH}/A.2019.ADMET_DMPK.CS.smi\"\n logging.info(f\"Processing {fname}\")\n\n try:\n import pubchempy as pcp\n except ModuleNotFoundError as e:\n print(e)\n return\n\n with open(fname, 'r') as fin, open(fout_1, 'w') as fout1, open(fout_2, 'w') as fout2:\n fin.readline()\n for line in fin:\n if line.startswith(\"\\\"\"):\n pairs = line.rstrip().split(\"\\\"\")\n name = pairs[1]\n pairs = pairs[2].split(',')\n logS0_SFF = pairs[0]\n logS0_CS = pairs[2]\n else:\n pairs = line.rstrip().split(',')\n name = pairs[0]\n logS0_SFF = pairs[1]\n logS0_CS = pairs[3]\n\n name = name.replace('\\\"', '')\n results = pcp.get_compounds(name, 'name')\n if len(results) > 0:\n isomeric_smiles = results[0].isomeric_smiles\n canon_smiles = canonicalize_smiles(isomeric_smiles)\n fout1.write(\"{},{}\\n\".format(canon_smiles, logS0_SFF))\n fout2.write(\"{},{}\\n\".format(canon_smiles, logS0_CS))",
"def parse_kmz(filename: str) -> List[Tuple[float, float]]:\n kmz = ZipFile(filename, \"r\")\n kml = kmz.open(\"doc.kml\", \"r\").read()\n\n tree = etree.parse(BytesIO(kml))\n\n coordinates = tree.xpath(\n \"/a:kml/a:Document/a:Placemark/a:LineString/a:coordinates\",\n namespaces={\"a\": \"http://www.opengis.net/kml/2.2\"},\n )[0].text\n\n # geopy expects coordinate in the (long, lat) format\n coords = [\n (float(y[1]), float(y[0]))\n for y in [\n x.strip().split(\",\") for x in coordinates.split(\"\\n\") if len(x.strip())\n ]\n ]\n\n return coords",
"def process_cvat_xml(xml_file, image_dir, output_dir,username,password,ilabels):\n KNOWN_TAGS = {'box', 'image', 'attribute'}\n\n if (image_dir is None):\n image_dir=os.path.join(output_dir,\"data/obj\")\n os.makedirs(image_dir, exist_ok=True)\n\n os.makedirs(output_dir, exist_ok=True)\n cvat_xml = etree.parse(xml_file)\n basename = os.path.splitext( os.path.basename( xml_file ) )[0]\n current_labels = {}\n traintxt = \"\"\n auto_lbl_count = 0\n\n if (ilabels is not None):\n vlabels=ilabels.split(',')\n for _label in vlabels:\n current_labels[_label]=auto_lbl_count\n auto_lbl_count+=1\n\n tracks= cvat_xml.findall( './/track' )\n\n if (tracks is not None) and (len(tracks) > 0):\n frames = {}\n\n for track in tracks:\n trackid = int(track.get(\"id\"))\n label = track.get(\"label\")\n boxes = track.findall( './box' )\n for box in boxes:\n frameid = int(box.get('frame'))\n outside = int(box.get('outside'))\n #occluded = int(box.get('occluded')) #currently unused\n #keyframe = int(box.get('keyframe')) #currently unused\n xtl = float(box.get('xtl'))\n ytl = float(box.get('ytl'))\n xbr = float(box.get('xbr'))\n ybr = float(box.get('ybr'))\n\n frame = frames.get( frameid, {} )\n\n if outside == 0:\n frame[ trackid ] = { 'xtl': xtl, 'ytl': ytl, 'xbr': xbr, 'ybr': ybr, 'label': label }\n\n frames[ frameid ] = frame\n\n width = int(cvat_xml.find('.//original_size/width').text)\n height = int(cvat_xml.find('.//original_size/height').text)\n\n taskid = int(cvat_xml.find('.//task/id').text)\n\n urlsegment = cvat_xml.find(\".//segments/segment/url\").text\n urlbase = urlsegment.split(\"?\")[0]\n\n httpclient = requests.session()\n httpclient.get(urlbase)\n\n csrftoken = \"none\"\n sessionid = \"none\"\n\n # Spit out a list of each object for each frame\n for frameid in sorted(frames.keys()):\n image_name = \"%s_%08d.jpg\" % (basename, frameid)\n image_path = os.path.join(image_dir, image_name)\n if not os.path.exists(image_path):\n if username is None:\n log.warn('{} image cannot be found. Is `{}` image directory correct?\\n'.format(image_path, image_dir))\n else:\n log.info('{} image cannot be found. 
Downloading from task ID {}\\n'.format(image_path, taskid))\n\n if sessionid == \"none\":\n if \"csrftoken\" in httpclient.cookies:\n csrftoken = httpclient.cookies[\"csrftoken\"]\n elif \"csrf\" in httpclient.cookies:\n csrftoken = httpclient.cookies[\"csrf\"]\n\n login_data = dict(username=username, password=password,\n csrfmiddlewaretoken=csrftoken, next='/dashboard')\n\n urllogin = urlbase+\"/auth/login\"\n httpclient.post(urllogin, data=login_data,\n headers=dict(Referer=urllogin))\n\n if (\"sessionid\" in httpclient.cookies):\n sessionid = httpclient.cookies[\"sessionid\"]\n\n url = urlbase+\"/api/v1/tasks/\"+str(taskid)+\"/frames/\"+ str(frameid)\n\n req = httpclient.get(url, headers=dict(\n csrftoken=csrftoken, sessionid=sessionid))\n\n with open(image_path, 'wb') as fo:\n fo.write(req.content)\n print('Url saved as %s\\n' % image_path)\n\n\n frame = frames[frameid]\n\n _yoloAnnotationContent=\"\"\n\n objids = sorted(frame.keys())\n\n for objid in objids:\n\n box = frame[objid]\n\n label = box.get('label')\n xmin = float(box.get('xtl'))\n ymin = float(box.get('ytl'))\n xmax = float(box.get('xbr'))\n ymax = float(box.get('ybr'))\n\n if not label in current_labels:\n current_labels[label] = auto_lbl_count\n auto_lbl_count+=1\n\n labelid=current_labels[label]\n yolo_x= (xmin + ((xmax-xmin)/2))/width\n yolo_y= (ymin + ((ymax-ymin)/2))/height\n yolo_w = (xmax - xmin) / width\n yolo_h = (ymax - ymin) / height\n\n if len(_yoloAnnotationContent) != 0:\n _yoloAnnotationContent += \"\\n\"\n\n _yoloAnnotationContent+=str(labelid)+\" \"+\"{:.6f}\".format(yolo_x) +\" \"+\"{:.6f}\".format(yolo_y) +\" \"+\"{:.6f}\".format(yolo_w) +\" \"+\"{:.6f}\".format(yolo_h)\n anno_name = os.path.basename(os.path.splitext(image_name)[0] + '.txt')\n anno_path = os.path.join(image_dir, anno_name)\n\n _yoloFile = open(anno_path, \"w\", newline=\"\\n\")\n _yoloFile.write(_yoloAnnotationContent)\n _yoloFile.close()\n\n if len(traintxt)!=0:\n traintxt+=\"\\n\"\n\n traintxt+=image_path\n\n else:\n for img_tag in cvat_xml.findall('image'):\n image_name = img_tag.get('name')\n width = img_tag.get('width')\n height = img_tag.get('height')\n image_path = os.path.join(image_dir, image_name)\n if not os.path.exists(image_path):\n log.warn('{} image cannot be found. 
Is `{}` image directory correct?'.\n format(image_path, image_dir))\n\n unknown_tags = {x.tag for x in img_tag.iter()}.difference(KNOWN_TAGS)\n if unknown_tags:\n log.warn('Ignoring tags for image {}: {}'.format(image_path, unknown_tags))\n\n _yoloAnnotationContent = \"\"\n\n for box in img_tag.findall('box'):\n label = box.get('label')\n xmin = float(box.get('xtl'))\n ymin = float(box.get('ytl'))\n xmax = float(box.get('xbr'))\n ymax = float(box.get('ybr'))\n\n if not label in current_labels:\n current_labels[label] = auto_lbl_count\n auto_lbl_count += 1\n\n labelid = current_labels[label]\n yolo_x = (xmin + ((xmax-xmin)/2))/width\n yolo_y = (ymin + ((ymax-ymin)/2))/height\n yolo_w = (xmax - xmin) / width\n yolo_h = (ymax - ymin) / height\n\n if len(_yoloAnnotationContent) != 0:\n _yoloAnnotationContent += \"\\n\"\n\n _yoloAnnotationContent += str(labelid)+\" \"+\"{:.6f}\".format(yolo_x) + \" \"+\"{:.6f}\".format(\n yolo_y) + \" \"+\"{:.6f}\".format(yolo_w) + \" \"+\"{:.6f}\".format(yolo_h)\n\n anno_name = os.path.basename(os.path.splitext(image_name)[0] + '.txt')\n anno_path = os.path.join(image_dir, anno_name)\n\n _yoloFile = open(anno_path, \"w\", newline=\"\\n\")\n _yoloFile.write(_yoloAnnotationContent)\n _yoloFile.close()\n\n traintxt_file=open(output_dir+\"/train.txt\",\"w\",newline=\"\\n\")\n traintxt_file.write(traintxt)\n traintxt_file.close()"
] | [
"0.7400537",
"0.66784334",
"0.64540035",
"0.62201595",
"0.6089382",
"0.60512364",
"0.5901473",
"0.5742026",
"0.5689828",
"0.566298",
"0.5614185",
"0.5548671",
"0.55181766",
"0.54977727",
"0.54335207",
"0.54022497",
"0.53411245",
"0.5290524",
"0.51947296",
"0.5126708",
"0.5105023",
"0.51038444",
"0.5083665",
"0.5041383",
"0.50178385",
"0.49815032",
"0.4978497",
"0.49693292",
"0.49666274",
"0.49582243",
"0.4953561",
"0.49524152",
"0.4951551",
"0.4927939",
"0.49228534",
"0.49032572",
"0.48865974",
"0.4884239",
"0.48671398",
"0.4859872",
"0.4858701",
"0.48580325",
"0.48555303",
"0.48386428",
"0.48380134",
"0.48367414",
"0.48272806",
"0.4797197",
"0.47849455",
"0.47748467",
"0.47729436",
"0.47665304",
"0.4765029",
"0.47643185",
"0.47633827",
"0.4757936",
"0.475629",
"0.47255385",
"0.47240403",
"0.47235793",
"0.47180954",
"0.47030744",
"0.47026962",
"0.46955058",
"0.4694504",
"0.46871805",
"0.46827447",
"0.46800697",
"0.4677994",
"0.4674073",
"0.4649388",
"0.4637765",
"0.46348333",
"0.46271998",
"0.4626545",
"0.46262357",
"0.4619647",
"0.46183303",
"0.460724",
"0.460677",
"0.45996132",
"0.45929748",
"0.45924482",
"0.45848146",
"0.45737493",
"0.45639807",
"0.45635423",
"0.455709",
"0.45555708",
"0.4549323",
"0.45473137",
"0.45463553",
"0.45454508",
"0.45452124",
"0.45407575",
"0.45312428",
"0.45294568",
"0.45104283",
"0.4505351",
"0.44963598"
] | 0.68759656 | 1 |
transforms airspace in open air format to kml for google earth | def open_airspace_format_2_kml(self, source_file_txt):
# load template for kml file
self.load_kml_template(self.full_path_kml_template)
# load airspace source
self.load_airspace_open_air_format(source_file_txt)
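# start the output with the template header and the head of the 'good' (type A) section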
self.kml_lines = list(self.kml_template['header'])  # copy, so extending below does not mutate the template
self.kml_lines.extend(self.kml_template['good_subdivided']['head'])
# collect KML lines for type 'A' (good) and type 'B' (bad) airspaces separately
kml_A = []
kml_B = []
# transform airspaces and attach to A and B collect-lists
for airspace in self.airspaces:
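# make_kml_format fills airspace.kml_lines from the matching template placemark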
airspace.make_kml_format(self.kml_template)
if airspace.as_type == 'A':
kml_A.extend(airspace.kml_lines)
elif airspace.as_type == 'B':
kml_B.extend(airspace.kml_lines)
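# assemble the final document: the 'good' (A) section first, then the 'bad' (B) section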
self.kml_lines.extend(kml_A)
self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])
# append the B ('bad') section
self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])
self.kml_lines.extend(kml_B)
self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])
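# derive a default output name from the source file (replace '.txt' with '_converted.kml')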
full_path_kml = source_file_txt[:-4] + '_converted.kml'
# ask the user where to save the result (UI save dialog)
full_path_kml = filesavebox(default=full_path_kml, filetypes="*.kml")
if full_path_kml is None:
print('Airspace conversion was aborted by the user')
quit()
# write to file
with open(full_path_kml, 'w') as f:
f.writelines(self.kml_lines)
print('Resulting KML file was saved to: %s' % full_path_kml) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def make_open_airspace_format(self):\n # Extract coordinates from KML\n for idxline in range(len(self.kml_lines)):\n if '<name>' in self.kml_lines[idxline]:\n self.name = self.kml_lines[idxline].replace('\\t', '').replace('<name>', '').replace('</name>', '').replace('\\n','')\n if not self.name.startswith('TS'):\n self.name = 'TS_' + self.name\n print('Type: %s | Name: %s' % (self.as_type, self.name))\n if '<coordinates>' in self.kml_lines[idxline]:\n self.coordinates_kml = self.kml_lines[idxline + 1].replace('\\t', '').replace('\\n', '')\n break\n # start conversion to airspace format\n \"\"\" AC A\n AN TS_Erzgeb\n AL FL98\n AH FL99\n DP 50:26:22 N 012:17:59 E\n DP 50:25:25 N 012:18:26 E\n DP 50:24:40 N 012:19:01 E\n DP 50:24:06 N 012:19:46 E\"\"\"\n\n # AC A\n self.txt_lines.append('AC %s\\n' % self.as_type)\n # AN TS_Erzgeb\n self.txt_lines.append('AN %s\\n' % self.name)\n # heights\n self.txt_lines.append('AL FL98\\n')\n self.txt_lines.append('AH FL99\\n')\n # coordinates\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n # Target format: DP 50:26:22 N 012:17:59 E\n lat_long = coo_pt.split(',')\n # latitude\n latDecAsStr = lat_long[1].split('.')\n #if '.' not in latDecAsStr: # take care of case \"51\" instead of \"51.123456\"\n # latDecAsStr += '.000000'\n lat_degree = abs(int(latDecAsStr[0]))\n #print(f'latDecAsStr {latDecAsStr}')\n if len(latDecAsStr)==1:\n latDecAsStr.append('0')\n lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1\n lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)\n lat_second = round(lat_secondDec*60)\n cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))\n if latDecAsStr[0].startswith('-'):\n cooString += ' S'\n else:\n cooString += ' N'\n # longitude\n #print(f'converting lat_long {lat_long}')\n # take care of case: no decimal sign included, case \"11\" instead of \"11.123456\"\n if '.' not in lat_long[0]:\n lat_long[0] += '.0'\n lonDecAsStr = lat_long[0].split('.')\n lon_degree = abs(int(lonDecAsStr[0]))\n lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1\n lon_minute = round((float('0.' + lonDecAsStr[1]) * 60) - lon_secondDec)\n lon_second = round(lon_secondDec * 60)\n cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))\n if lonDecAsStr[0].startswith('-'):\n cooString += ' W'\n else:\n cooString += ' E'\n cooString += '\\n'\n self.txt_lines.append(cooString)",
"def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)",
"def kml_2_open_airspace_and_json_format(self, full_path):\n # read file\n f = open(full_path,'r')\n kml = f.readlines()\n f.close()\n # find airspaces\n \"\"\"Placemark >\n < name > Bremen - Blumenthal\n Thermikplatte < / name >\n < styleUrl > # inline10</styleUrl>\n < Polygon >\n < tessellate > 1 < / tessellate >\n < outerBoundaryIs >\n < LinearRing >\n < coordinates >\n 8.529121049900063, 53.19549566929423, 0\n 8.52324583919868, 53.21131939607898, 0\n 8.545439298799483, 53.23055800702935, 0\n 8.588991466114615, 53.23047069814625, 0\n 8.575289966189502, 53.20745451706468, 0\n 8.560633120477348, 53.19724609335408, 0\n 8.529121049900063, 53.19549566929423, 0\n < / coordinates >\n \n < / LinearRing >\n < / outerBoundaryIs >\n < / Polygon >\n < / Placemark >\"\"\"\n container = []\n idxLine = 0\n did_not_pass_main_folder = True\n list_of_airspace_types_included = []\n while idxLine < len(kml):\n #print(kml[idxLine])\n #if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:\n # # we have to jump over the first folder\n # print(f'Reading everything inside folder: {kml[idxLine]}')\n # did_not_pass_main_folder = False\n if '<Folder>' in kml[idxLine]: # begin of airspace\n as_type = kml[idxLine+1].replace('\\t','').replace('<name>','').replace('</name>\\n','') # <name>B</name>\n print('Reading AS-types: ' + as_type)\n list_of_airspace_types_included.append(as_type)\n #if not (as_type == 'A' or as_type == 'B'):\n # print('#### Check Folder / Airspace Types, must be \"A\" or \"B\" and try again (current %s)' % as_type)\n # msgbox('Check Folder / Airspace Types, are not \"A\" or \"B\" (current %s). Airspace E will be used for export.' % as_type)\n # as_type = 'E'\n\n if '<Placemark' in kml[idxLine]: # begin of airspace\n container = []\n if '</Placemark' in kml[idxLine]: # end of airspace\n # make sure only Polygons are stored\n for as_line in container:\n if '<Polygon>' in as_line:\n idx_lookAt_start = None\n for idx, line_of_container in enumerate(container):\n if \"<LookAt>\" in line_of_container:\n idx_lookAt_start = idx\n if \"</LookAt>\" in line_of_container:\n idx_lookAt_end = idx\n # Remove lookAt lines if necessary\n if idx_lookAt_start:\n container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part\n # append airspace to airspace list as airspace class\n self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))\n container.append(kml[idxLine])\n idxLine += 1\n print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))\n # summary\n outlines = ['* KML conversion file, rename this line']\n json_dict = {\"circles\": [], \"polygons\": []}\n for airspace in self.airspaces:\n # prepare open-airspace formate\n outlines.append('\\n\\n') # separate airspaces\n outlines.extend(airspace.txt_lines)\n # prepare json\n json_dict['polygons'].append(airspace.json_dict)\n\n # write open airspace format\n target_path = full_path[:-4] + '_converted.txt'\n # uisave dialog\n\n target_path = filesavebox(default=target_path, filetypes=\"*.txt\")\n if target_path is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n f = open(target_path,'w')\n f.writelines(outlines)\n f.close()\n print('Result was written to: %s' % target_path)\n\n # write json:\n target_path_json = target_path[:-4] + '.json'\n\n json_string = json.dumps(json_dict)\n json_file = open(target_path_json, \"w\")\n json_file.write(json_string)\n json_file.close()\n\n # write list of airspace files for index.html for leaflet map\n print('The 
following airspace types have been converted:')\n print(list_of_airspace_types_included)",
"def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result",
"def make_input_data_kmls(rundata):\n \n import os\n from . import topotools, dtopotools\n\n regions2kml(rundata, combined=False)\n gauges2kml(rundata)\n\n topofiles = rundata.topo_data.topofiles\n for f in topofiles:\n topo_file_name = f[-1]\n topo_type = f[0]\n topo2kml(topo_file_name, topo_type)\n \n dtopofiles = rundata.dtopo_data.dtopofiles\n for f in dtopofiles:\n dtopo_file_name = f[-1]\n dtopo_type = f[0]\n dtopo2kml(dtopo_file_name, dtopo_type)",
"def make_json_airspace_format(self):\n # The previous fct make_open_airspace_format already stored, coordinates_kml, name and type\n # This data is collected in an dictionary, which then is stored as json.\n # initialize dict\n coordinates_as_list_of_floats = []\n # run through coordinates\n coordinates_as_list_of_floats = []\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n lat_long = coo_pt.split(',')\n coordinates_as_list_of_floats.append([float(lat_long[1]), float(lat_long[0])])\n # make json dict\n # rename name if not thermal space\n if self.name.startswith('TS_') and not (self.as_type == 'A' or self.as_type == 'B'):\n name_for_json = self.name[3:]\n else:\n name_for_json = self.name\n # rename airspace type for json:\n if self.as_type == 'A':\n self.as_type = 'Good_thermals'\n if self.as_type == 'B':\n self.as_type = 'Bad_thermals'\n self.json_dict = {\"AL\": \"FL98\", \"AH\": \"FL99\", \"AC\": self.as_type, \"AN\": name_for_json, \"data\": coordinates_as_list_of_floats}",
"def __init__(self, full_path_of_source=''):\n if len(full_path_of_source) == 0:\n full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=[\"*.txt\", \"*.kml\"])\n if full_path_of_source is None:\n print('Airspace conversion was aborted by the user')\n quit()\n # set template (this should not be changed)\n self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named \"good\" and \"bad\"\n\n self.airspaces = [] # airspace container\n self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template\n 'good_subdivided': {'head':[], 'placemark': [], 'tail': []},\n 'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}\n self.txt_lines = [] # airspace file in open airspace format\n self.kml_lines = [] # airspace file in kml format\n \"\"\" handle conversion from and to KML / airspace format\"\"\"\n if full_path_of_source.lower().endswith('.kml'):\n self.kml_2_open_airspace_and_json_format(full_path_of_source)\n if full_path_of_source.lower().endswith('.txt'):\n self.open_airspace_format_2_kml(full_path_of_source)\n self.plot_all() # works for now only for TXT input",
"def makepkl():\n # Old osgeo.ogr approach\n from osgeo import ogr\n # USTimeZones.kml source is unknown, but was freely available and\n # Has been converted to a pkl file\n kmlpath = os.path.join(os.path.dirname(__file__), 'USTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(uspklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(uspklpath, 'w'))\n\n # WorldTimeZones.kml source is below and was freely available and\n # Has been converted to a pkl file\n # https://productforums.google.com/forum/?fromgroups=#!msg/gec-tools/EdR18tz_5k8/MRPV85OxXIkJ\n kmlpath = os.path.join(os.path.dirname(__file__), 'WorldTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(worldpklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(worldpklpath, 'w'))",
"def export_kmz(self):\n self.export_kml(kmz=True)",
"def from_enmap(emap):\n\n new_map = so_map()\n hdulist = emap.wcs.to_fits()\n header = hdulist[0].header\n new_map.pixel = header[\"CTYPE1\"][-3:]\n try:\n new_map.ncomp = header[\"NAXIS3\"]\n except:\n new_map.ncomp = 1\n new_map.data = emap.copy()\n new_map.nside = None\n new_map.geometry = new_map.data.geometry[1:]\n new_map.coordinate = header[\"RADESYS\"]\n if new_map.coordinate == \"ICRS\":\n new_map.coordinate = \"equ\"\n\n return new_map",
"def topo2kml(topo_file_name, topo_type, color='00FF00'):\n\n import os\n from clawpack.geoclaw import topotools\n topo = topotools.Topography(topo_file_name, topo_type=topo_type)\n topo.read_header()\n xy = topo.extent\n name = os.path.splitext(os.path.split(topo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def dtopo2kml(dtopo_file_name, dtopo_type, color='8888FF'):\n\n import os\n from clawpack.geoclaw import dtopotools\n dtopo = dtopotools.DTopography()\n dtopo.read(dtopo_file_name, dtopo_type)\n x1 = dtopo.x.min()\n x2 = dtopo.x.max()\n y1 = dtopo.y.min()\n y2 = dtopo.y.max()\n xy = (x1,x2,y1,y2)\n name = os.path.splitext(os.path.split(dtopo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def line2kml(xy,fname='line.kml',name='line',color='00FFFF',width=3,\n verbose=True):\n \n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Line: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_line(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def get_kml_dict(self, tx, ty_tms, tz, image_format, draworder = 0):\n d = {}\n\n d[\"south\"], d[\"west\"], d[\"north\"], d[\"east\"] = self.tileswne(tx, ty_tms, tz)\n\n image_filename = get_tile_filename(tx, ty_tms, tz, format_extension[image_format],False)\n d[\"image_filename\"] = image_filename\n d[\"image_filename\"] = d[\"image_filename\"].replace(\"\\\\\",\"/\")\n\n if self.options.url is None:\n d[\"image_url\"] = \"../../%s\" % image_filename\n else:\n d[\"image_url\"] = \"%s%s\" % (self.options.url, image_filename)\n d[\"image_url\"] = d[\"image_url\"].replace(\"\\\\\",\"/\")\n\n url = self.options.url\n if url is None:\n # Top level KML is linked from `doc.kml' and it needs different path.\n if tz == self.tminz:\n url = \"\"\n else:\n url = \"../../\"\n\n if self.options.kmz:\n extension = \"kmz\"\n else:\n extension = \"kml\"\n\n d[\"link_url\"] = \"%s%s\" % (url, get_tile_filename(tx, ty_tms, tz, extension,False))\n d[\"link_url\"] = d[\"link_url\"].replace(\"\\\\\",\"/\")\n\n d[\"minlodpixels\"] = int(self.tilesize / 2)\n d[\"maxlodpixels\"] = -1 # int(self.tilesize * 8)\n\n if tx == 0:\n d[\"draw_order\"] = draworder + 2 * tz + 1\n else:\n d[\"draw_order\"] = draworder + 2 * tz\n\n return d",
"def generate_garmin_kml(self, d ):\n return (\"\"\"\n <GroundOverlay>\n <Icon>\n <href>%(image_url)s</href>\n <DrawOrder>%(draw_order)d</DrawOrder>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\"\"\" % d )",
"def kml(cls, user, logs, kml, kml_doc):\n # KML Compliant Datetime Formatter\n kml_datetime_format = \"%Y-%m-%dT%H:%M:%S.%fZ\"\n icon = 'http://maps.google.com/mapfiles/kml/shapes/airports.png'\n threshold = 1 # Degrees\n\n kml_folder = kml.newfolder(name=user.username)\n\n flights = TakeoffOrLandingEvent.flights(user)\n if len(flights) == 0:\n return\n\n logs = filter(lambda log: cls._is_bad_position(log, threshold), logs)\n for i, flight in enumerate(flights):\n label = 'Flight {}'.format(i + 1) # Flights are one-indexed\n kml_flight = kml_folder.newfolder(name=label)\n\n flight_logs = filter(lambda x: flight.within(x.timestamp), logs)\n if len(flight_logs) < 2:\n continue\n\n coords = []\n angles = []\n when = []\n for entry in flight_logs:\n pos = entry.uas_position.gps_position\n # Spatial Coordinates\n coord = (pos.longitude, pos.latitude,\n units.feet_to_meters(entry.uas_position.altitude_msl))\n coords.append(coord)\n\n # Time Elements\n time = entry.timestamp.strftime(kml_datetime_format)\n when.append(time)\n\n # Degrees heading, tilt, and roll\n angle = (entry.uas_heading, 0.0, 0.0)\n angles.append(angle)\n\n # Create a new track in the folder\n trk = kml_flight.newgxtrack(name='Flight Path')\n trk.altitudemode = AltitudeMode.absolute\n\n # Append flight data\n trk.newwhen(when)\n trk.newgxcoord(coords)\n trk.newgxangle(angles)\n\n # Set styling\n trk.extrude = 1 # Extend path to ground\n trk.style.linestyle.width = 2\n trk.style.linestyle.color = Color.blue\n trk.iconstyle.icon.href = icon\n\n for obstacle in MovingObstacle.objects.all():\n obstacle.kml(path=flight_logs, kml=kml_flight, kml_doc=kml_doc)",
"def poly2kml(xy,fname=None,name='poly',color='00FF00', width=3,\n verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n x,y = xy\n\n if verbose:\n print(\"Polygon: %10.6f %10.6f\" % (x[0],y[0]))\n for j in range(1,len(x)):\n print(\" %10.6f %10.6f\" % (x[j],y[j]))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x'] = x\n mapping['y'] = y\n mapping['elev'] = elev\n mapping['name'] = name\n d = \" x[0] = %s, y[0] = %s\\n\" % (x[0],y[0]) \n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n mapping['desc'] = d\n mapping['color'] = color\n mapping['width'] = width\n\n v = \"\\n\"\n for j in range(len(x)):\n v = v + \"%s,%s,%s\\n\" % (f2s(x[j]),f2s(y[j]),f2s(elev))\n v = v + \"%s,%s,%s\\n\" % (f2s(x[0]),f2s(y[0]),f2s(elev))\n v.replace(' ','')\n \n region_text = kml_region(mapping, v)\n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def regions2kml(rundata=None,fname='regions.kml',verbose=True,combined=True):\n\n from numpy import cos,pi,floor\n\n if rundata is None:\n try:\n import setrun\n reload(setrun)\n rundata = setrun.setrun()\n except:\n raise IOError(\"*** cannot execute setrun file\")\n\n clawdata = rundata.clawdata\n x1,y1 = clawdata.lower[0:]\n x2,y2 = clawdata.upper[0:]\n description = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\\n\" % (f2s(y1),f2s(y2))\n\n mx,my = clawdata.num_cells[0:]\n dx = (x2-x1)/float(mx)\n dx_meters = dx*111e3*cos(pi*0.5*(y1+y2)/180.)\n dy = (y2-y1)/float(my)\n dy_meters = dy*111e3\n if verbose:\n print(\"Domain: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n dx_deg,dx_min,dx_sec = deg2dms(dx)\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n #print \"Level 1 resolution: dx = %g deg, %g min, %g sec = %g meters\" \\\n # % (dx_deg,dx_min,dx_sec,dx_meters)\n levtext = \"Level 1 resolution: dy = %g deg, %g min, %g sec = %g meters\\n\" \\\n % (dy_deg,dy_min,dy_sec,dy_meters)\n if verbose:\n print(levtext)\n description = description + levtext\n\n amr_levels_max = rundata.amrdata.amr_levels_max\n refinement_ratios_y = rundata.amrdata.refinement_ratios_y\n num_ref_ratios = len(refinement_ratios_y)\n if amr_levels_max > num_ref_ratios+1:\n raise IOError(\"*** Too few refinement ratios specified for \" \\\n + \"amr_levels_max = %i\" % amr_levels_max)\n dy_levels = (num_ref_ratios+1) * [dy]\n for k,r in enumerate(refinement_ratios_y):\n level = k+2\n dy = dy_levels[k] / r\n dy_levels[k+1] = dy\n dy_meters = dy*111e3\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n levtext = \"Level %s resolution: dy = %g deg, %g min, %g sec = %g meters (refined by %i)\\n\" \\\n % (level,dy_deg,dy_min,dy_sec,dy_meters,r)\n if verbose:\n print(levtext)\n description = description + levtext\n\n if verbose:\n print(\"Allowing maximum of %i levels\" % amr_levels_max)\n\n elev = 0.\n if not combined:\n fname = 'Domain.kml'\n\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = 'Computational Domain'\n mapping['desc'] = description\n mapping['color'] = \"0000FF\" # red\n mapping['width'] = 2\n\n region_text = kml_region(mapping)\n kml_text = kml_text + region_text\n\n if not combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)\n\n \n\n regions = rundata.regiondata.regions\n if len(regions)==0 and verbose:\n print(\"No regions found in setrun.py\")\n\n\n for rnum,region in enumerate(regions):\n if not combined:\n fname = 'Region_%s.kml' % str(rnum).zfill(2)\n kml_text = kml_header(fname)\n\n minlevel,maxlevel = region[0:2]\n t1,t2 = region[2:4]\n x1,x2,y1,y2 = region[4:]\n\n if verbose:\n print(\"Region %i: %10.6f %10.6f %10.6f %10.6f\" \\\n % (rnum,x1,x2,y1,y2))\n print(\" minlevel = %i, maxlevel = %i\" \\\n % (minlevel,maxlevel) \\\n + \" t1 = %s, t2 = %s\" % (f2s(t1),f2s(t2)))\n mapping = {}\n mapping['minlevel'] = minlevel\n mapping['maxlevel'] = maxlevel\n mapping['t1'] = t1\n mapping['t2'] = t2\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = 'Region %i' % rnum\n description = \"minlevel = %i, maxlevel = %i\\n\" % (minlevel,maxlevel) \\\n + \" t1 = %s, t2 = %s\\n\" % (f2s(t1),f2s(t2)) \\\n + \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\\n\\n\" % 
(f2s(y1),f2s(y2))\n if len(dy_levels) >= minlevel:\n dy = dy_levels[minlevel-1]\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n dy_meters = dy*111e3\n levtext = \"Level %s resolution: \\ndy = %g deg, %g min, %g sec \\n= %g meters\\n\" \\\n % (minlevel,dy_deg,dy_min,dy_sec,dy_meters)\n description = description + levtext\n if (maxlevel > minlevel) and (len(dy_levels) >= maxlevel):\n dy = dy_levels[maxlevel-1]\n dy_deg,dy_min,dy_sec = deg2dms(dy)\n dy_meters = dy*111e3\n levtext = \"\\nLevel %s resolution: \\ndy = %g deg, %g min, %g sec \\n= %g meters\\n\" \\\n % (maxlevel,dy_deg,dy_min,dy_sec,dy_meters)\n description = description + levtext\n mapping['desc'] = description\n mapping['color'] = \"FFFFFF\" # white\n mapping['width'] = 3\n\n region_text = kml_region(mapping)\n kml_text = kml_text + region_text\n if not combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)\n\n if combined:\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def image2kml(self,varname,filename=None):\n\n vdata=self.get_array(varname)\n im=self.get_image(vdata)\n if filename is None:\n filename='%s.png' % varname\n f=open(filename,'w')\n f.write(im)\n f.close()\n d=self.get_kml_dict(varname,filename)\n pylab.close('all')\n return self.__class__.kmlimage % d",
"def facility_as_kml(facility):\n return KML.Placemark(\n KML.name(facility.nrqz_id),\n KML.Point(KML.coordinates(f\"{facility.location.x},{facility.location.y}\")),\n )",
"def export_kml(self, kmz=False):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n if kmz:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kmz\",\n filetypes=((\"keyhole markup language\", \"*.kmz\"),\n (\"All Files\", \"*.*\")))\n else:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kml\",\n filetypes=((\"keyhole markup language\", \"*.kml\"),\n (\"All Files\", \"*.*\")))\n if outputfile:\n self.tabs.window.aistracker.create_kml_map(\n outputfile, kmzoutput=kmz, orderby=orderby,\n region=currentregion)\n else:\n raise ExportAborted('Export cancelled by user.')",
"def k2lc(epic):\n prefix = epic[:4]\n id = epic[4:]\n c = \"01\"\n path = \"data/c01/{0}00000/{1}\".format(prefix, id)\n end = \"kepler_v1.0_lc.fits\"\n file = \"{0}/hlsp_everest_k2_llc_{1}-c{2}_{3}\".format(path, epic, c, end)\n x, y = process_data(file)\n return x, y",
"def run(self,\n altitude: float,\n day_of_year: float,\n local_time: float,\n latitude: float,\n longitude: float,\n f107: float,\n f107m: float,\n kp1: float,\n kp2: float,\n get_uncertainty: bool = False\n ):\n\n output_file = tempfile.NamedTemporaryFile(\n delete=False, suffix=\".out\", prefix=\"swami_\", mode=\"r+\")\n\n data_dtm = str(self.path_to_data)\n data_dtm = data_dtm + \"/\" if data_dtm[-1] != \"/\" else data_dtm\n data_um = str(os.path.join(self.path_to_data, \"um\"))\n data_um = data_um + \"/\" if data_um[-1] != \"/\" else data_um\n\n is_mcm = True if self.model is _AtmModel.MCM else False\n is_dtm = True if self.model is _AtmModel.DTM2020 else False\n is_um = True if self.model is _AtmModel.UM else False\n\n input_dict = {\n \"altitude\": float(altitude),\n \"day_of_year\": float(day_of_year),\n \"local_time\": float(local_time),\n \"latitude\": float(latitude),\n \"longitude\": float(longitude),\n \"f107\": float(f107),\n \"f107m\": float(f107m),\n \"kp1\": float(kp1),\n \"kp2\": float(kp2),\n \"bMCM\": is_mcm,\n \"bDTM\": is_dtm,\n \"bUM\": is_um,\n \"bUMstd\": bool(get_uncertainty), # and is_um,\n \"bDTMunc\": bool(get_uncertainty), # and is_dtm,\n \"data_dtm\": data_dtm,\n \"data_um\": data_um,\n \"output_file\": str(output_file.name)\n }\n\n input_file = self._generate_nml_from_dict(input_dict)\n\n cmd = [str(self.path_to_bin), input_file]\n\n proc = subprocess.run(cmd, check=True)\n\n out = self._read_output_file(output_file.name)\n out[\"_input\"] = input_dict\n\n os.unlink(input_file)\n os.unlink(output_file.name)\n\n return out",
"def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):\n if not children:\n children = []\n\n args['tx'], args['ty'], args['tz'] = tx, ty, tz\n args['tileformat'] = tileext\n if 'tilesize' not in args:\n args['tilesize'] = tilesize\n\n if 'minlodpixels' not in args:\n args['minlodpixels'] = int(args['tilesize'] / 2)\n if 'maxlodpixels' not in args:\n args['maxlodpixels'] = int(args['tilesize'] * 8)\n if children == []:\n args['maxlodpixels'] = -1\n\n if tx is None:\n tilekml = False\n args['title'] = options.title\n else:\n tilekml = True\n args['title'] = \"%d/%d/%d.kml\" % (tz, tx, ty)\n args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)\n\n if tx == 0:\n args['drawOrder'] = 2 * tz + 1\n elif tx is not None:\n args['drawOrder'] = 2 * tz\n else:\n args['drawOrder'] = 0\n\n url = options.url\n if not url:\n if tilekml:\n url = \"../../\"\n else:\n url = \"\"\n\n s = \"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://www.opengis.net/kml/2.2\">\n <Document>\n <name>%(title)s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\"\"\" % args\n if tilekml:\n s += \"\"\"\n <Region>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n </Region>\n <GroundOverlay>\n <drawOrder>%(drawOrder)d</drawOrder>\n <Icon>\n <href>%(ty)d.%(tileformat)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\n\"\"\" % args\n\n for cx, cy, cz in children:\n csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)\n s += \"\"\"\n <NetworkLink>\n <name>%d/%d/%d.%s</name>\n <Region>\n <LatLonAltBox>\n <north>%.14f</north>\n <south>%.14f</south>\n <east>%.14f</east>\n <west>%.14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n </Region>\n <Link>\n <href>%s%d/%d/%d.kml</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n <viewFormat/>\n </Link>\n </NetworkLink>\n \"\"\" % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,\n args['minlodpixels'], url, cz, cx, cy)\n\n s += \"\"\" </Document>\n</kml>\n \"\"\"\n return s",
"def prepare_ozi(mbbox, mwidth, mheight, name, transform):\n def deg(value, is_lon):\n degrees = math.floor(abs(value))\n minutes = (abs(value) - degrees) * 60\n return '{:4d},{:3.5F},{}'.format(\n int(round(degrees)), minutes,\n ('W' if is_lon else 'S') if value < 0 else ('E' if is_lon else 'N'))\n\n ozipoint = ('Point{:02d},xy, , ,in, deg, , ,N, , ,E' +\n ', grid, , , ,N')\n bbox = transform.backward(mbbox)\n points = \"\\n\".join([ozipoint.format(n) for n in range(3, 31)])\n header = '''OziExplorer Map Data File Version 2.2\nNik4\n{name}\n1 ,Map Code,\nWGS 84,WGS 84, 0.0000, 0.0000,WGS 84\nReserved 1\nReserved 2\nMagnetic Variation,,,E\nMap Projection,Mercator,PolyCal,No,AutoCalOnly,No,BSBUseWPX,No\nPoint01,xy, 0, 0,in, deg,{top},{left}, grid, , , ,N\nPoint02,xy, {width:4d}, {height:4d},in, deg,{bottom},{right}, grid, , , ,N\n{points}\nProjection Setup,,,,,,,,,,\nMap Feature = MF ; Map Comment = MC These follow if they exist\nTrack File = TF These follow if they exist\nMoving Map Parameters = MM? These follow if they exist\nMM0,Yes\nMMPNUM,4\nMMPXY,1,0,0\n'''.format(name=name,\n top=deg(bbox.maxy, False),\n left=deg(bbox.minx, True),\n width=mwidth - 1,\n height=mheight - 1,\n bottom=deg(bbox.miny, False),\n right=deg(bbox.maxx, True),\n points=points)\n return ''.join([\n header,\n \"MMPXY,2,{},0\\n\".format(mwidth),\n \"MMPXY,3,{},{}\\n\".format(mwidth, mheight),\n \"MMPXY,4,0,{}\\n\".format(mheight),\n 'MMPLL,1,{:4.6f},{:4.6f}\\n'.format(bbox.minx, bbox.maxy),\n 'MMPLL,2,{:4.6f},{:4.6f}\\n'.format(bbox.maxx, bbox.maxy),\n 'MMPLL,3,{:4.6f},{:4.6f}\\n'.format(bbox.maxx, bbox.miny),\n 'MMPLL,4,{:4.6f},{:4.6f}\\n'.format(bbox.minx, bbox.miny),\n \"MM1B,{}\\n\".format((mbbox.maxx - mbbox.minx) / mwidth * math.cos(\n math.radians(bbox.center().y))),\n \"MOP,Map Open Position,0,0\\n\",\n \"IWH,Map Image Width/Height,{},{}\\n\".format(mwidth, mheight),\n ])",
"def cmip6_renaming_dict():\n # I could probably simplify this with a generalized single dict, \n # which has every single possible `wrong` name and then for each model\n # the renaming function just goes through them...\n dim_name_dict = {\n \"AWI-CM-1-1-MR\":{},\n \"BCC-CSM2-MR\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"vertex\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"BCC-ESM1\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"vertex\": \"vertex\",\n 'time_bounds': \"time_bnds\",\n },\n \"CAMS-CSM1-0\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"vertex\": 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \"CanESM5\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"time_bounds\": \"time_bnds\",\n \"vertex\": \"vertices\",\n },\n \"CanESM5-CanOE\": {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n \"vertex\": \"vertices\",\n },\n \"CNRM-CM6-1\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\": \"axis_nbounds\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n 'vertex': \"nvertex\",\n 'time_bounds': \"time_bnds\",\n },\n \"CNRM-ESM2-1\": {\n \"x\": [\"x\", \"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n \"bnds\":\"axis_nbounds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"E3SM-1-0\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"E3SM-1-1\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"E3SM-1-1-ECA\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"EC-Earth3-LR\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"EC-Earth3-Veg\": {\n \"x\": 
[\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"EC-Earth3\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"FGOALS-f3-L\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"NICAM16-7S\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"MIROC-ES2L\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": [\"lev\", \"zlev\"],\n \"lev_bounds\": [\"lev_bnds\", \"zlev_bnds\"],\n \"lon_bounds\": \"x_bnds\",\n \"lat_bounds\": \"y_bnds\",\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"MIROC6\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"x_bnds\",\n \"lat_bounds\": \"y_bnds\",\n 'time_bounds': \"time_bnds\",\n },\n \"HadGEM3-GC31-LL\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"HadGEM3-GC31-MM\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"UKESM1-0-LL\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"time_bounds\":\"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n 'GISS-E2-2-G': { \n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-G-CC\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-G\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-H\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 
'time_bounds': \"time_bnds\",\n },\n \"CESM1-1-CAM5-CMIP5\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-WACCM\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-WACCM-FV2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":'d2',\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-FV2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":'d2',\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"GFDL-CM4\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"GFDL-OM4p5B\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"GFDL-ESM4\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"NESM3\": {\n \"x\": ['i', \"lon\"],\n \"y\": ['j', \"lat\"],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"MRI-ESM2-0\": {\n \"x\": ['x', \"lon\"],\n \"y\": ['y', \"lat\"],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":'bnds',\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": [\"x_bnds\", 'lon_bnds'],\n \"lat_bounds\": [\"y_bnds\", 'lat_bnds'],\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"SAM0-UNICON\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"MCM-UA-1-0\": {\n \"x\": 
\"longitude\",\n \"y\": \"latitude\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n }, \n 'IPSL-CM6A-LR': {\n \"x\": ['x', \"lon\"],\n \"y\": ['y', \"lat\"],\n \"lon\": 'nav_lon',\n \"lat\": 'nav_lat',\n \"lev\": [\"lev\",\"deptht\", \"olevel\"],\n \"lev_bounds\": [\"lev_bounds\", \"deptht_bounds\",'olevel_bounds'],\n \"lon_bounds\": \"bounds_nav_lon\",\n \"lat_bounds\": \"bounds_nav_lat\",\n 'vertex': 'nvertex',\n \"bnds\":\"axis_nbounds\",\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n 'NorCPM1': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM1-F': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM2-LM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM2-MM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\", # i leave this here because the names are the same as for the other Nor models.\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \n 'MPI-ESM1-2-HR': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'MPI-ESM1-2-LR': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'MPI-ESM-1-2-HAM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'CNRM-CM6-1-HR': {\n \"x\": \"x\",\n \"y\": \"y\",\n \"lon\": 'lon',\n \"lat\": 'lat',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n 'vertex': None,\n 'time_bounds': \"time_bounds\",\n },\n 'FIO-ESM-2-0': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'ACCESS-ESM1-5': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n 
\"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'ACCESS-CM2': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'INM-CM4-8': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n 'INM-CM5-0': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n 'MRI-ESM2-0':{\n \"x\": \"x\",\n \"y\": \"y\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n# \"lon_bounds\": 'x_bnds',\n# \"lat_bounds\": 'y_bnds',\n# 'vertex': None, # this is a mess. there is yet another convention. Will have to deal with this once I wrap xgcm into here.\n 'time_bounds': \"time_bnds\",\n },\n 'CIESM': { # this is a guess.\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n# \"lev\": \"lev\", # no 3d data available as of now\n# \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'KACE-1-0-G': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n# \"lev\": \"lev\", # no 3d data available as of now\n# \"lev_bounds\": \"lev_bnds\",\n# \"lon_bounds\": \"vertices_longitude\",\n# \"lat_bounds\": \"vertices_latitude\",\n# \"lon_bounds\": \"vertices_longitude\",\n# \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \n }\n # cast all str into lists\n for model in dim_name_dict.keys():\n for field in dim_name_dict[model].keys():\n if isinstance(dim_name_dict[model][field], str) or dim_name_dict[model][field] is None :\n dim_name_dict[model][field] = [dim_name_dict[model][field]]\n# add 'lon' and 'lat' as possible logical indicies for all models. This should take care of all regridded ocean output and all atmosphere models.\n if 'x' in dim_name_dict[model].keys():\n if not 'lon' in dim_name_dict[model]['x']:\n dim_name_dict[model]['x'].append('lon')\n \n if 'y' in dim_name_dict[model].keys():\n if not 'lat' in dim_name_dict[model]['y']:\n dim_name_dict[model]['y'].append('lat') \n return dim_name_dict",
"def write_to_kml(gps_df, output_path):\n coordinates = []\n for index, row in gps_df.iterrows():\n lat = (1 if row['Lat_dir'] == 'N' else -1) * (float(row['Lat'][0:2]) + (float(row['Lat'][2:]) / 60))\n long = (1 if row['Long_dir'] == 'E' else -1) * (float(row['Long'][0:3]) + (float(row['Long'][3:]) / 60))\n speed = row['Speed']\n coordinates.append((long, lat, speed))\n\n kml_file = kml.newlinestring(name='line', coords=coordinates)\n kml_file.linestyle.color = simplekml.Color.cyan\n kml_file.linestyle.width = 3\n kml_file.polystyle.color = simplekml.Color.cyan\n kml_file.altitudemode = simplekml.AltitudeMode.relativetoground\n kml_file.extrude = 1\n\n # stores all coordinates into the output file\n with open(output_path, \"w+\"):\n kml.save(output_path, format=True)",
"def funcion_escribe_kml():\n\n DB = \"geoinfo\" # default database name\n LOGIN = \"gast\" # default login\n PASSWORD = \"gast\" # default password\n\n cnx = MySQLdb.connect(db=DB, user=LOGIN, passwd=PASSWORD)\n cursor = cnx.cursor()\n\n cursor.execute(\"SELECT * from wlan order by essid\")\n results = cursor.fetchall()\n\n print \"Total APs: %s\" % len(results) # print total AP count\n\n f = open(sys.argv[1], 'w')\n f.write('<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n')\n f.write('<kml xmlns=\"http://earth.google.com/kml/2.2\">\\n')\n f.write(' <Folder>\\n')\n f.write(' <name>GpsDrive+Kismet wifis</name>\\n')\n # By default folder is showed\n f.write(' <visibility>1</visibility>\\n')\n # GpsDrive icon\n f.write(' <ScreenOverlay>\\n')\n f.write(' <name>Info</name>\\n')\n f.write(' <description>Wifi data</description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n f.write(' <Icon>\\n')\n f.write(' <href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/gpsdrivelogo.png</href>\\n')\n f.write(' </Icon>\\n')\n f.write(' <overlayXY x=\"0\" y=\"-1\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <screenXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <rotationXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <size x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' </ScreenOverlay>')\n\n # write all APs to .kml file\n for line in results:\n name = line[6].replace('&', 'and') # To avoid Google Earth errors\n wep = line[8]\n lat = line[1]\n lon = line[2]\n mac = line[5]\n\n f.write('\\n')\n f.write(' <Placemark>\\n')\n f.write(' <name>%s</name>\\n' % name)\n f.write(' <description>')\n f.write(' <![CDATA[ <table width=\"300\"><tr><td>')\n f.write(' - EESID: %s\\n <br />' % name)\n f.write(' - BBSID: %s\\n <br />' % mac)\n tipo_ap = funcion_tipo_ap(wep)\n f.write(' - Security: %s\\n <br />' % tipo_ap)\n f.write(' - GPS coords.: %s, %s\\n <br />' % (lon, lat))\n f.write(' </td></tr></table> ]]>')\n f.write(' </description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n\n tipo_ap = funcion_tipo_ap(wep) # get AP type\n\n # Draw AP icon\n f.write('<Style>')\n f.write('<IconStyle>')\n f.write(' <Icon><href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/%s.png</href></Icon>\\n' % tipo_ap)\n f.write('</IconStyle>')\n f.write('</Style>')\n f.write(' <Point><coordinates>%s,%s,45</coordinates></Point>\\n' % (lon, lat))\n f.write(' </Placemark>\\n')\n\n f.write(' </Folder>\\n')\n f.write('</kml>')",
"def solar_model():\n \n latitude, longitude, timezone, elevation = location_input()\n year, time = time_input()\n\n lat_r = latitude/180*np.pi\n lon_r = longitude/180*np.pi \n n = 0\n for i in range(1900,year):\n if i%4 == 0:\n n += 366\n else:\n n+=365\n JulD = n + time + 2415018.5 - (timezone)/24\n LT = time - int(time)\n JC = (JulD - 2451545) / 36525\n x = 46.815 + JC * (0.00059 - JC * 0.001813)\n M_OE = 23 + (26 + (21.448 - JC * x) / 60) / 60\n EEO = 0.016708634 - JC * (0.000042037 + 0.0000001267 * JC)\n GMAS = 357.52911 + JC * (35999.05029 - 0.0001537 * JC)\n GMAS_r = m.radians(GMAS)\n GMLS = (280.46646 + JC * (36000.76983 + JC * 0.0003032))%360\n GMLS_r = m.radians(GMLS)\n Obliq_C = M_OE + 0.00256 * np.cos((125.04 - 1934.136 * JC) / 180 * np.pi)\n Obliq_C_r = m.radians(Obliq_C)\n SEC = np.sin(GMAS_r) * (1.914602 - JC * (0.004817 + 0.000014 * JC)) + np.sin(2 * GMAS_r) * (0.019993 - 0.000101 * JC) + np.sin(3 * GMAS_r) * 0.000289\n STL = GMLS + SEC\n SAL = STL - 0.00569 - 0.00478 * np.sin((125.04 - 1934.136 * JC) / 180 * np.pi)\n SAL_r = m.radians(SAL)\n sin_Delta = np.sin(Obliq_C_r) * np.sin(SAL_r)\n Delta_r = np.arcsin(sin_Delta) #in radians \n Var_y = np.tan((Obliq_C / 2) / 180 * np.pi) * np.tan((Obliq_C / 2) / 180 * np.pi)\n EOT_prime = Var_y * np.sin(2 * GMLS_r) - 2 * EEO * np.sin(GMAS_r) + 4 * EEO * Var_y * np.sin(GMAS_r) * np.cos(2 * GMLS_r) - 0.5 * Var_y * Var_y * np.sin(4 * GMLS_r) - 1.25 * EEO * EEO * np.sin(2 * GMAS_r)\n EOT = 4 * EOT_prime / np.pi * 180 \n TST = (LT * 1440 + EOT + 4 * longitude - 60 * timezone)%1440\n if TST / 4 < 0:\n Omega = TST/4+180\n else:\n Omega = TST/4 - 180 \n Omega_r = m.radians(Omega)\n \n cos_Zenith = np.sin(lat_r) * np.sin(Delta_r) + np.cos(lat_r) * np.cos(Delta_r) * np.cos(Omega_r)\n Zenith_r = np.arccos(cos_Zenith) #in radians\n Aprime_r = np.arccos((np.sin(lat_r) * np.cos(Zenith_r) - np.sin(Delta_r)) / (np.cos(lat_r) * np.sin(Zenith_r)))\n Aprime = Aprime_r / np.pi * 180\n if Omega > 0:\n Azimuth = (Aprime + 180) % 360 #in degrees\n else:\n Azimuth = (540 - Aprime) % 360 #in degrees \n Azimuth_r = Azimuth / 180 * np.pi\n Elev_angle = (np.pi)/2 - Zenith_r\n\n \n # calculate incidence angle\n # Beta is equal to angle of tilted surface to horizontal (in radians)\n Beta = 45 # in degrees\n Beta_r = m.radians(Beta)\n \n cos_incidence = np.sin(Delta_r)* np.sin(lat_r) * np.cos(Beta_r) - np.sin(Delta_r) * np.cos(lat_r) * np.sin(Beta_r) * np.cos(Azimuth_r) + np.cos(Delta_r) * np.cos(lat_r) * np.cos(Beta_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(lat_r) * np.sin(Beta_r) * np.cos(Azimuth_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(Beta_r) * np.sin(Azimuth_r) * np.sin(Omega_r) \n incidence_ang_r = np.arccos(cos_incidence)\n \n return Delta_r, lat_r, Omega_r, Zenith_r, Azimuth_r, Elev_angle",
"def writer(output, output_name, output_data):\n\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if('Latitude' in exif.keys() and\n 'Latitude Reference' in exif.keys() and\n 'Longitude Reference' in exif.keys() and\n 'Longitude' in exif.keys()):\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'],\n description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output, output_name))",
"def convert_mtr_to_kittimot_format(data_list: List[Union[str, int, float]], frame_id: int) -> List[Union[str, int, float]]:\n annotation_list = []\n track_id = -1\n for data in data_list:\n annotation = [frame_id, -1]\n # print(\"type: \", str2id(bboxes['object_id']))\n object_type = data[0]\n truncated = -1\n occluded = -1\n alpha = -1\n bbox2d = [-1, -1, -1, -1]\n dimensions = data[1:4]\n location = data[4:7]\n rotation_y = data[7]\n\n annotation.append(object_type)\n annotation.append(truncated)\n annotation.append(occluded)\n annotation.append(alpha)\n annotation += bbox2d\n annotation += dimensions\n annotation += location\n annotation.append(rotation_y)\n annotation_list.append(annotation)\n return annotation_list\n\n\n\n \"\"\"\n convert KITTI MOTS format to AB3DMOT format\n\n \n @params:\n data_list: a list containing data in KITTI MOTs format\n \"\"\"",
"def gauges2kml(rundata=None, fname='gauges.kml', verbose=True):\n\n\n if rundata is None:\n try:\n import setrun\n reload(setrun)\n rundata = setrun.setrun()\n except:\n raise IOError(\"*** cannot execute setrun file\")\n\n elev = 0.\n kml_text = kml_header(fname)\n\n\n gauges = rundata.gaugedata.gauges\n if len(gauges)==0 and verbose:\n print(\"No gauges found in setrun.py\")\n\n\n for rnum,gauge in enumerate(gauges):\n t1,t2 = gauge[3:5]\n x1,y1 = gauge[1:3]\n gaugeno = gauge[0]\n if verbose:\n print(\"Gauge %i: %s, %s \\n\" % (gaugeno,f2s(x1),f2s(y1)) \\\n + \" t1 = %s, t2 = %s\" % (f2s(t1),f2s(t2)))\n mapping = {}\n mapping['gaugeno'] = gaugeno\n mapping['t1'] = t1\n mapping['t2'] = t2\n mapping['x1'] = x1\n mapping['y1'] = y1\n mapping['elev'] = elev\n mapping['name'] = 'Gauge %i' % rnum\n description = \" t1 = %s, t2 = %s\\n\" % (f2s(t1),f2s(t2)) \\\n + \" x1 = %s, y1 = %s\\n\" % (f2s(x1),f2s(y1))\n mapping['desc'] = description\n\n gauge_text = kml_gauge(mapping)\n kml_text = kml_text + gauge_text\n kml_text = kml_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def tsMap(self):\n mapplt = FermiMap()\n mapplt.savepath = self.workpath\n mapplt.image = self.outtsmap\n mapplt.figname = 'TSMAP.pdf'\n mapplt.cbarlabel = r'TS'\n mapplt.mapSky()\n if showSrc:\n srcs = self.getSrc()\n srcs = srcs[(srcs['Separation'] <= 3.) & ([not i.endswith('c') for i in srcs['Name']])]\n mapplt.srcSky(srcs['RA'], srcs['DEC'], srcs['Name'])\n mapplt.save()\n\n print(\"\\t=== Figure '{}' created ===\".format( os.path.join(mapplt.savepath, mapplt.figname) ))\n return",
"def test_convert_csv_to_kml(self):\n import tempfile\n from pykml.util import convert_csv_to_kml\n\n # create a CSV file for testing\n csvfile = tempfile.TemporaryFile(mode='w+')\n csvfile.write('name,snippet,lat,lon\\n')\n csvfile.write('first,The first one,45.0,-90.0\\n')\n csvfile.write('second,The second one,46.0,-89.0\\n')\n csvfile.write('third,\"The third one (with quotes)\",45.0,-88.0\\n')\n csvfile.seek(0)\n\n kmlobj = convert_csv_to_kml(csvfile)\n csvfile.close()\n\n target = etree.fromstring(\n '<kml '\n 'xmlns:atom=\"http://www.w3.org/2005/Atom\" '\n 'xmlns:gx=\"http://www.google.com/kml/ext/2.2\" '\n 'xmlns=\"http://www.opengis.net/kml/2.2\">'\n '<Document>'\n '<Folder>'\n '<name>KmlFile</name>'\n '<Placemark>'\n '<name>first</name>'\n '<Snippet maxLines=\"2\">The first one</Snippet>'\n '<description>'\n '<![CDATA['\n '<table border=\"1\"'\n '<tr><th>name</th><td>first</td></tr>'\n '<tr><th>snippet</th><td>The first one</td></tr>'\n '<tr><th>lat</th><td>45.0</td></tr>'\n '<tr><th>lon</th><td>-90.0</td></tr>'\n '</table>'\n ']]>'\n '</description>'\n '<Point>'\n '<coordinates>-90.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>second</name>'\n '<Snippet maxLines=\"2\">The second one</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>second</td></tr><tr><th>snippet</th><td>The second one</td></tr><tr><th>lat</th><td>46.0</td></tr><tr><th>lon</th><td>-89.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-89.0,46.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>third</name>'\n '<Snippet maxLines=\"2\">The third one (with quotes)</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>third</td></tr><tr><th>snippet</th><td>The third one (with quotes)</td></tr><tr><th>lat</th><td>45.0</td></tr><tr><th>lon</th><td>-88.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-88.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '</Folder>'\n '</Document>'\n '</kml>'\n )\n self.assertTrue(compare_xml(target, kmlobj))",
"def generate_sigmet_geojson(sigair_met_list):\n\n # Initialise Variables\n used_groups = [] #contains applicable groupings for use on the web page (i.e. it excludes groupings that do not appear) - used to filter layers on the map\n used_layers = []\n sigair_met_features = []\n\n # If there are no Sig/Airmets (incase None is passed)\n if sigair_met_list is None:\n return sigair_met_features, used_groups, used_layers\n \n # Create the Fill Colour attributes\n fill_col = {}\n line_col = {}\n \n # Set fill colours for SIGMET\n colr = current_app.config['WEATHER_SIGMET_COLOUR']\n opacity = current_app.config['WEATHER_SIGMET_OPACITY']\n col_r = int(colr[1:3],16)\n col_g = int(colr[3:5],16)\n col_b = int(colr[5:7],16)\n\n fill_col['SIGMET'] = f'rgba({col_r},{col_g},{col_b},{opacity})'\n line_col['SIGMET'] = f'rgba({int(col_r*0.75)},{int(col_g*0.75)},{int(col_b*0.75)},1)' #f'rgba({col_r},{col_g},{col_b},1)'\n\n # Set fill colours for AIRMET\n colr = current_app.config['WEATHER_AIRMET_COLOUR']\n opacity = current_app.config['WEATHER_AIRMET_OPACITY']\n col_r = int(colr[1:3],16)\n col_g = int(colr[3:5],16)\n col_b = int(colr[5:7],16)\n\n fill_col['AIRMET'] = f'rgba({col_r},{col_g},{col_b},{opacity})'\n line_col['AIRMET'] = f'rgba({int(col_r*0.75)},{int(col_g*0.75)},{int(col_b*0.75)},1)'\n\n\n # Create a GEOJSON Feature for each Notam - Feature contains specific Notam attributes\n for met in sigair_met_list:\n \n geojson_geom=Polygon([met['coords']])\n\n # Append this Feature to the collection, setting the various attributes as properties\n sigair_met_features.append(Feature(geometry=geojson_geom, properties={'fill':fill_col[met['type']], 'line':line_col[met['type']], \n 'group': met['type'],\n 'valid_from': datetime.strftime(met['valid_from'], '%d-%b %H:%M'),\n 'valid_to': datetime.strftime(met['valid_to'], '%d-%b %H:%M'),\n 'layer_group': met['type']+'_polygon', \n 'text': met['body'],\n 'flight_levels': met['flevels']}))\n\n # Add this group+geometry combination to the list, so the map knows to split out a layer for it.\n if (met['type'] + '_polygon') not in used_layers:\n used_layers.append(met['type'] + '_polygon')\n\n # Add the Notam Grouping to the collection of used groups\n if met['type'] not in used_groups:\n used_groups.append(met['type'])\n \n # Sort groups alphabetically for better display on the map\n \n return sigair_met_features, used_groups, used_layers",
"def reprojectQcew(overwrite=False):\n\n\tif exists(qcew_2913) and not overwrite:\n\t\tprint '\\nstate plane qcew already exists, if you wish to'\n\t\tprint 'overwrite the existing file use the \"overwrite\" flag\\n'\n\t\treturn\n\n\tgeom_type = 'POINT'\n\ttemplate = src_qcew\n\tospn = arcpy.SpatialReference(2913)\n\tmanagement.CreateFeatureclass(dirname(qcew_2913),\n\t\tbasename(qcew_2913), geom_type, template, spatial_reference=ospn)\n\n\ti_cursor = da.InsertCursor(qcew_2913, '*')\n\n\ts_fields = ['Shape@', '*']\n\twith da.SearchCursor(src_qcew, s_fields) as s_cursor:\n\t\t# replace point coordinates with geometry object in field\n\t\t# definition\n\t\tfields = list(s_cursor.fields)\n\t\tfields[1] = fields.pop(0)\n\n\t\tfor row in s_cursor:\n\t\t\tlist_row = list(row)\n\t\t\tlist_row[1] = list_row.pop(0)\n\t\t\td = OrderedDict(zip(fields, list_row))\n\n\t\t\tgeom = d['Shape@']\n\t\t\tgeom_2913 = geom.projectAs(ospn) \n\t\t\td['Shape@'] = geom_2913\n\t\t\td['POINT_X'] = geom_2913.firstPoint.X\n\t\t\td['POINT_Y'] = geom_2913.firstPoint.Y\n\n\t\t\twrite_row = [v for v in d.values()]\n\t\t\ti_cursor.insertRow(write_row)\n\n\tdel i_cursor",
"def airports(osm_path): \n return (retrieve(osm_path,'multipolygons',['aeroway'],**{'aeroway':[\"='aerodrome'\"]})).rename(columns={'aeroway': 'asset'})",
"def car2car(map_car, template):\n\n project = template.copy()\n project.data = enmap.project(map_car.data, template.data.shape, template.data.wcs)\n return project",
"def quad2kml(xy,fname=None,name='quad',color='FF0000',width=3,verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n if type(xy[0]) is tuple:\n x1,x2,x3,x4 = xy[0]\n y1,y2,y3,y4 = xy[1]\n else:\n x1,y1,x2,y2,x3,y3,x4,y4 = xy[0:]\n\n if verbose:\n print(\"Quadrilateral: %10.6f %10.6f\" % (x1,y1))\n print(\" %10.6f %10.6f\" % (x2,y2))\n print(\" %10.6f %10.6f\" % (x3,y3))\n print(\" %10.6f %10.6f\" % (x4,y4))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['x3'] = x3\n mapping['x4'] = x4\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['y3'] = y3\n mapping['y4'] = y4\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, y1 = %s\\n\" % (f2s(x1),f2s(y1)) \\\n + \" x2 = %s, y2 = %s\" % (f2s(x2),f2s(y2)) \\\n + \" x3 = %s, y3 = %s\" % (f2s(x3),f2s(y3)) \\\n + \" x4 = %s, y4 = %s\" % (f2s(x4),f2s(y4))\n mapping['color'] = color\n mapping['width'] = 3\n\n region_text = kml_region(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def get_kml_object(filename: str) -> fastkml.kml.KML:\n\t\n\tkml_obj = fastkml.kml.KML()\n\t\n\twith open(filename) as file:\n\t\tkml_obj.from_string(file.read().encode(\"utf-8\"))\n\t\n\treturn kml_obj",
"def get_kml_dict(self,name,filename):\n\n lon1,lon2,lat1,lat2=self.get_bounds()\n d={'lat1':lat1,'lat2':lat2,'lon1':lon1,'lon2':lon2, \\\n 'name':name,'filename':filename,'time':self.get_time()}\n return d",
"def drought_map_nwmforecast(request):\n \n view_center = [-105.2, 39.0]\n view_options = MVView(\n projection='EPSG:4326',\n center=view_center,\n zoom=7.0,\n maxZoom=12,\n minZoom=5\n )\n\n # TIGER state/county mapserver\n tiger_boundaries = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/State_County/MapServer'},\n legend_title='States & Counties',\n layer_options={'visible':True,'opacity':0.8},\n legend_extent=[-112, 36.3, -98.5, 41.66]) \n \n # USGS Rest server for HUC watersheds \n watersheds = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://hydro.nationalmap.gov/arcgis/rest/services/wbd/MapServer'},\n legend_title='HUC Watersheds',\n layer_options={'visible':False,'opacity':0.4},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n\n # NOAA Rest server for NWM streamflow \n nwm_stream = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://mapservice.nohrsc.noaa.gov/arcgis/rest/services/national_water_model/NWM_Stream_Analysis/MapServer',\n 'params': {'LAYERS': 'show:1,2,3,4,5,12'}},\n legend_title='NWM Streamflow',\n layer_options={'visible':False,'opacity':1.0},\n legend_classes=[\n MVLegendClass('line', '> 1.25M', stroke='rgba(75,0,115,0.9)'),\n MVLegendClass('line', '500K - 1.25M', stroke='rgba(176,28,232,0.9)'),\n MVLegendClass('line', '100K - 500K', stroke='rgba(246,82,213,0.9)'),\n MVLegendClass('line', '50K - 100K', stroke='rgba(254,7,7,0.9)'),\n MVLegendClass('line', '25K - 50K', stroke='rgba(252,138,23,0.9)'),\n MVLegendClass('line', '10K - 25K', stroke='rgba(45,108,183,0.9)'),\n MVLegendClass('line', '5K - 10K', stroke='rgba(27,127,254,0.9)'),\n MVLegendClass('line', '2.5K - 5K', stroke='rgba(79,169,195,0.9)'),\n MVLegendClass('line', '250 - 2.5K', stroke='rgba(122,219,250,0.9)'),\n MVLegendClass('line', '0 - 250', stroke='rgba(206,222,251,0.9)'),\n MVLegendClass('line', 'No Data', stroke='rgba(195,199,201,0.9)')],\n legend_extent=[-112, 36.3, -98.5, 41.66])\n nwm_stream_anom = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://mapservice.nohrsc.noaa.gov/arcgis/rest/services/national_water_model/NWM_Stream_Analysis/MapServer',\n 'params': {'LAYERS': 'show:7,8,9,10,11,12'}},\n legend_title='NWM Flow Anamaly',\n layer_options={'visible':True,'opacity':1.0},\n legend_classes=[\n MVLegendClass('line', 'High', stroke='rgba(176,28,232,0.9)'),\n MVLegendClass('line', '', stroke='rgba(61,46,231,0.9)'),\n MVLegendClass('line', '', stroke='rgba(52,231,181,0.9)'),\n MVLegendClass('line', 'Moderate', stroke='rgba(102,218,148,0.9)'),\n MVLegendClass('line', '', stroke='rgba(241,156,77,0.9)'),\n MVLegendClass('line', '', stroke='rgba(175,62,44,0.9)'),\n MVLegendClass('line', 'Low', stroke='rgba(241,42,90,0.9)'),\n MVLegendClass('line', 'No Data', stroke='rgba(195,199,201,0.9)')],\n legend_extent=[-112, 36.3, -98.5, 41.66])\n\n # NOAA Rest server for NWM soil moisture\n nwm_soil_legend = MVLegendGeoServerImageClass(value='test', style='green', layer='NWM_Land_Analysis',\n geoserver_url='https://mapservice.nohrsc.noaa.gov/arcgis/rest/services/national_water_model/NWM_Land_Analysis/MapServer/legend?f=pjson') \n nwm_soil = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://mapservice.nohrsc.noaa.gov/arcgis/rest/services/national_water_model/NWM_Land_Analysis/MapServer'},\n legend_title='NWM Soil Moisture (%)',\n layer_options={'visible':True,'opacity':0.5},\n legend_classes=[\n MVLegendClass('polygon', '0.95 - 1.0', fill='rgba(49,56,148,0.5)'),\n MVLegendClass('polygon', '0.85 - 
0.95', fill='rgba(97,108,181,0.5)'),\n MVLegendClass('polygon', '0.75 - 0.85', fill='rgba(145,180,216,0.5)'),\n MVLegendClass('polygon', '0.65 - 0.75', fill='rgba(189,225,225,0.5)'),\n MVLegendClass('polygon', '0.55 - 0.65', fill='rgba(223,240,209,0.5)'),\n MVLegendClass('polygon', '0.45 - 0.55', fill='rgba(225,255,191,0.5)'),\n MVLegendClass('polygon', '0.35 - 0.45', fill='rgba(255,222,150,0.5)'),\n MVLegendClass('polygon', '0.25 - 0.35', fill='rgba(255,188,112,0.5)'),\n MVLegendClass('polygon', '0.15 - 0.25', fill='rgba(235,141,81,0.5)'),\n MVLegendClass('polygon', '0.05 - 0.15', fill='rgba(201,77,58,0.5)'),\n MVLegendClass('polygon', '0 - 0.05', fill='rgba(166,0,38,0.5)')],\n legend_extent=[-112, 36.3, -98.5, 41.66])\n\n # Define map view options\n drought_nwmfx_map_view_options = MapView(\n height='100%',\n width='100%',\n controls=['ZoomSlider', 'Rotate', 'ScaleLine', 'FullScreen',\n {'MousePosition': {'projection': 'EPSG:4326'}},\n {'ZoomToExtent': {'projection': 'EPSG:4326', 'extent': [-112, 36.3, -98.5, 41.66]}}],\n layers=[tiger_boundaries,nwm_stream_anom,nwm_stream,nwm_soil,watersheds],\n view=view_options,\n basemap='OpenStreetMap',\n legend=True\n )\n\n toggle_switch = ToggleSwitch(display_text='Default Toggle',\n name='toggle1')\n\n context = {\n 'drought_nwmfx_map_view_options':drought_nwmfx_map_view_options,\n 'toggle_switch': toggle_switch,\n }\n\n return render(request, 'co_drought/drought_nwmfx.html', context)",
"def GEEmacaGCMs(ptsFile,metric,timeStep,startYear,endYear,scenarios,buf,poly,models,\n username,folderOut, scalePix = 4000):\n \n # load required libraries\n import ee\n \n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n time_d = {}\n time_d['month'] = 'projm'\n time_d['year'] = 'projy'\n \n for met in metric:\n\n for scenario in scenarios:\n\n for model in models:\n\n MACA = (ee.ImageCollection('IDAHO_EPSCOR/MACAv2_METDATA_MONTHLY')\n .select(met)\n .filterMetadata('model', 'equals', model)\n .filterMetadata('scenario', 'equals', scenario))\n\n metL = [met]\n \n years = list(range(startYear, endYear + 1))\n yearsEE = ee.List(years)\n \n if all([(timeStep == 'year'),any([(met == 'tasmin'),(met == 'tasmax'),\n (met == 'huss'),(met == 'rsds'),\n (met == 'was')])]):\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n elif (timeStep == 'month'):\n \n img_col = MACA.filter(ee.Filter.calendarRange(startYear, endYear, 'year'))\n\n elif all([(timeStep == 'year'),(met == 'pr')]):\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (MACA\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .sum()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n #else:\n #print(\"incorrect time step specified\")\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_MACA_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('buffered pts by:' + str(buf) + ' for MACA: ' + met + ' ' + scenario + ' ' + model)\n\n elif poly > 0:\n \n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_MACA_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('spatial mean in poly: no buffer for MACA: ' + met + ' ' + scenario + ' ' + 
model)\n\n else:\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_MACA_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print('value at point: no buffer for MACA: ' + met + ' ' + scenario + ' ' + model)",
"def read_szx_fmv_11(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n mphr = eps_file.mphr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = raw_data[\"LONGITUDE\"].size\n\n data = {}\n metadata = {}\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n metadata[\"spacecraft_id\"] = np.int8(mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(mphr[\"ORBIT_START\"])\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n\n for f in fields:\n metadata[f] = np.int16(mphr[f.upper()])\n\n fields = [\"sat_track_azi\"]\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan), (\"latitude\", long_nan),\n (\"swath_indicator\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = nan_val\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"f_kp\", byte_nan), (\"f_usable\", byte_nan), (\"f_f\", uint_nan),\n (\"f_v\", uint_nan), (\"f_oa\", uint_nan), (\"f_sa\", uint_nan),\n (\"f_tel\", uint_nan), (\"f_land\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n # modify longitudes from (0, 360) to (-180,180)\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1),\n n_lines).astype(np.uint8)\n data[\"line_num\"] = idx_nodes.astype(np.uint16)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n return data, metadata",
"def read_kml():\n global kmldata\n global CONFIG\n if type(kmldata) == type(None):\n if not os.path.exists(CONFIG[\"kmlfile\"]):\n fiona.drvsupport.supported_drivers['KML'] = 'rw'\n kmldata = geopandas.read_file(CONFIG[\"kmlrepo\"], driver=\"KML\")\n os.makedirs(CONFIG[\"cachedir\"],exist_ok=True)\n with open(CONFIG[\"kmlfile\"], \"wb\") as fh:\n pickle.dump(kmldata,fh)\n else:\n with open(CONFIG[\"kmlfile\"], \"rb\") as fh:\n kmldata = pickle.load(fh)\n return kmldata",
"def ip_to_kml(ip, reader):\n try:\n response = reader.city(ip)\n longitude = response.location.longitude\n latitude = response.location.latitude\n kml = (\n \"<Placemark>\\n\"\n \"<name>%s</name>\\n\"\n \"<Point>\\n\"\n \"<coordinates>%6f,%6f</coordinates>\\n\"\n \"</Point>\\n\"\n \"</Placemark>\\n\"\n ) % (ip, longitude, latitude)\n return kml\n except:\n # the ip was not found in the database\n return None",
"def wgs84_wkt():\n return WGS84.to_wkt()",
"def WGS84toOSGB36(lat, lon):\n\t# First convert to radians\n\t# These are on the wrong ellipsoid currently: GRS80. (Denoted by _1)\n\tlat_1 = lat*pi/180\n\tlon_1 = lon*pi/180\n\t\n\t# Want to convert to the Airy 1830 ellipsoid, which has the following:\n\t# The GSR80 semi-major and semi-minor axes used for WGS84(m)\n\ta_1, b_1 = 6378137.000, 6356752.3141\n\te2_1 = 1 - (b_1*b_1)/(a_1*a_1) # The eccentricity of the GRS80 ellipsoid\n\tnu_1 = a_1/sqrt(1-e2_1*sin(lat_1)**2)\n\t\n\t# First convert to cartesian from spherical polar coordinates\n\tH = 0 # Third spherical coord.\n\tx_1 = (nu_1 + H)*cos(lat_1)*cos(lon_1)\n\ty_1 = (nu_1 + H)*cos(lat_1)*sin(lon_1)\n\tz_1 = ((1-e2_1)*nu_1 + H)*sin(lat_1)\n\t\n\t# Perform Helmut transform (to go between GRS80 (_1) and Airy 1830 (_2))\n\ts = 20.4894*10**-6 # The scale factor -1\n\t# The translations along x,y,z axes respectively\n\ttx, ty, tz = -446.448, 125.157, -542.060\n\t# The rotations along x,y,z respectively, in seconds\n\trxs, rys, rzs = -0.1502, -0.2470, -0.8421\n\t# In radians\n\trx, ry, rz = rxs*pi/(180*3600.), rys*pi/(180*3600.), rzs*pi/(180*3600.)\n\tx_2 = tx + (1+s)*x_1 + (-rz)*y_1 + (ry)*z_1\n\ty_2 = ty + (rz)*x_1 + (1+s)*y_1 + (-rx)*z_1\n\tz_2 = tz + (-ry)*x_1 + (rx)*y_1 + (1+s)*z_1\n\t\n\t# Back to spherical polar coordinates from cartesian\n\t# Need some of the characteristics of the new ellipsoid\n\t# The GSR80 semi-major and semi-minor axes used for WGS84(m)\n\ta, b = 6377563.396, 6356256.909\n\te2 = 1 - (b*b)/(a*a) # The eccentricity of the Airy 1830 ellipsoid\n\tp = sqrt(x_2**2 + y_2**2)\n\t\n\t# Lat is obtained by an iterative proceedure:\n\tlat = atan2(z_2, (p*(1-e2))) # Initial value\n\tlatold = 2*pi\n\twhile abs(lat - latold) > 10**-16:\n\t\tlat, latold = latold, lat\n\t\tnu = a/sqrt(1-e2*sin(latold)**2)\n\t\tlat = atan2(z_2+e2*nu*sin(latold), p)\n\t\t\n\t# Lon and height are then pretty easy\n\tlon = atan2(y_2, x_2)\n\tH = p/cos(lat) - nu\n\t\n\t# E, N are the British national grid coordinates - eastings and northings\n\tF0 = 0.9996012717 # scale factor on the central meridian\n\tlat0 = 49*pi/180 # Latitude of true origin (radians)\n\tlon0 = -2*pi/180 # Longtitude of true origin and central meridian (radians)\n\tN0, E0 = -100000, 400000 # Northing & easting of true origin (m)\n\tn = (a-b)/(a+b)\n\t\n\t# meridional radius of curvature\n\trho = a*F0*(1-e2)*(1-e2*sin(lat)**2)**(-1.5)\n\teta2 = nu*F0/rho-1\n\t\n\tM1 = (1 + n + (5/4)*n**2 + (5/4)*n**3) * (lat-lat0)\n\tM2 = (3*n + 3*n**2 + (21/8)*n**3) * sin(lat-lat0) * cos(lat+lat0)\n\tM3 = ((15/8)*n**2 + (15/8)*n**3) * sin(2*(lat-lat0)) * cos(2*(lat+lat0))\n\tM4 = (35/24)*n**3 * sin(3*(lat-lat0)) * cos(3*(lat+lat0))\n\t\n\t# meridional arc\n\tM = b * F0 * (M1 - M2 + M3 - M4)\n\t\n\tI = M + N0\n\tII = nu*F0*sin(lat)*cos(lat)/2\n\tIII = nu*F0*sin(lat)*cos(lat)**3*(5 - tan(lat)**2 + 9*eta2)/24\n\tIIIA = nu*F0*sin(lat)*cos(lat)**5*(61 - 58*tan(lat)**2 + tan(lat)**4)/720\n\tIV = nu*F0*cos(lat)\n\tV = nu*F0*cos(lat)**3*(nu/rho - tan(lat)**2)/6\n\tVI = nu*F0*cos(lat)**5*(5 - 18*tan(lat)**2 + tan(lat)**4 + 14*eta2 - 58*eta2*tan(lat)**2)/120\n\t\n\tN = I + II*(lon-lon0)**2 + III*(lon-lon0)**4 + IIIA*(lon-lon0)**6\n\tE = E0 + IV*(lon-lon0) + V*(lon-lon0)**3 + VI*(lon-lon0)**5\n\t\n\t# Job's a good'n.\n\treturn E, N",
"def packet_to_kml(packet, reader):\n\n try:\n src_ip = packet[IP].src\n src_kml = ip_to_kml(src_ip, reader)\n except:\n src_kml = None\n try:\n dest_ip = packet[IP].dest\n dest_kml = ip_to_kml(dest_ip, reader)\n except:\n dest_kml = None\n\n if src_kml is not None and dest_kml is not None:\n connect_kml = ips_to_line_kml(src_ip, dest_ip, reader)\n print(\"Added connection\")\n else:\n connect_kml = None\n\n return src_kml, dest_kml, connect_kml",
"def main():\n input_file_path = sys.argv[1]\n output_file_path = sys.argv[2]\n gps_df = create_df(input_file_path) # creates a data frame\n gps_df = clean_data(gps_df) # cleans the data\n print('Cleaning done')\n write_to_kml(gps_df, output_file_path) # writes to kml file",
"def build_asop_dict(filename):\n cube = iris.load_cube(filename)\n\n # Look for name keys. Default first 15 characters of filename\n name = filename.split('/')[-1][0:15]\n for key in ['source_id','source_label','short_name','name','long_name']:\n if key in cube.attributes:\n name = cube.attributes[key]\n break\n if 'variant_label' in cube.attributes:\n name += ('_' + cube.attributes['variant_label'])\n\n constraint = cube.standard_name\n\n # Get coordinate deltas\n t1 = cube.coords('time')[0][0].cell(0).point\n t2 = cube.coords('time')[0][1].cell(0).point\n # Try assuming datetime object, otherwise int\n try:\n dt = int((t2 - t1).total_seconds())\n except AttributeError: # assume units of days\n dt = int((t2-t1)*60*60*24)\n print('Warning: Time units not found. Units assumed to be \"days\"')\n\n # Estimate average grid spacing in km\n dims=[cube.dim_coords[n].standard_name for n in range(0,len(cube.dim_coords))]\n dim_name_lat = None\n dim_name_lon = None\n for dim in dims:\n if 'lat' in dim.lower():\n dim_name_lat = dim\n elif 'lon' in dim.lower():\n dim_name_lon = dim\n if (dim_name_lat is None) or (dim_name_lon is None):\n raise RuntimeError(filename+': latitude or longitude dimension not found.\\n'\n + 'Valid latitude names contain \"lat\" and '\n + 'valid longitude names contain \"lon\"')\n deltas = {}\n for dvar,coord in zip(['dy','dx'],[dim_name_lat,dim_name_lon]):\n if cube.coords(coord)[0].is_monotonic():\n coord_list = cube.coords(coord)[0].points\n # Estimating at equator for now\n deltas[dvar] = np.absolute(np.mean(np.diff(coord_list))*110)\n\n # get time descriptions\n if (dt > 86399) and (dt < 86401):\n # This is a day, with some room for error\n time_type = 'day'\n time_desc = 'daily'\n elif dt >= 86401:\n days = round(dt/(60*60*24))\n time_type = str(days) + 'day'\n time_desc = str(days) + '-day'\n elif (dt > 10799) and (dt < 10801):\n time_type = '3hr'\n time_desc = '3-hourly'\n elif (dt > 3599) and (dt < 3601):\n time_type = '1hr'\n time_desc = 'hourly'\n elif dt <= 3599:\n minutes = round(dt/60)\n time_type = str(minutes) + 'min'\n time_desc = str(minutes) + '-min'\n elif dt <= 86399:\n # catch other hour lengths\n hours = round(dt/60*60)\n time_type = str(hours) + 'hour'\n time_desc = str(hours) + '-hourly'\n\n # Set scale factor, starting with some common units\n pr_units = cube.units\n scale_factor = 1\n if pr_units == Unit('mm'):\n scale_factor = round(86400 / dt)\n elif pr_units == Unit('kg m-2 s-1'):\n scale_factor = 86400\n else:\n try:\n # Find conversion factor between 1 in dataset's units\n # and a \"benchmark\" unit\n bm = Unit('kg m-2 day-1')\n scale_factor = pr_units.convert(1,bm)\n except ValueError:\n try:\n bm = Unit('mm day-1')\n scale_factor = pr_units.convert(1,bm)\n except ValueError:\n print(\"Warning: Could not determine scale factor. Using default of \"+scale_factor)\n\n asop_dict = {}\n asop_dict['infile'] = filename\n asop_dict['name'] = name\n asop_dict['dt'] = dt\n asop_dict['dx'] = deltas['dx']\n asop_dict['dy'] = deltas['dy']\n asop_dict['constraint'] = constraint\n asop_dict['scale_factor'] = scale_factor\n asop_dict['legend_name'] = ''\n asop_dict['region'] = ''\n asop_dict['box_size'] = 7*deltas['dx']\n asop_dict['color'] = ''\n asop_dict['region_size'] = 7\n asop_dict['lag_length'] = 6\n asop_dict['grid_type'] = ''\n asop_dict['time_type'] = time_type\n asop_dict['grid_desc'] = ''\n asop_dict['time_desc'] = time_desc\n asop_dict['autocorr_length'] = 8*dt\n\n return asop_dict",
"def geo_transform(self):\n pass",
"def to_wkt(self):\n return _property_op(arctern.ST_AsText, self)",
"def ips_to_line_kml(ip1, ip2, reader):\n\n res1 = reader.city(ip1)\n long1 = res1.location.longitude\n lat1 = res1.location.latitude\n\n res2 = reader.city(ip2)\n long2 = res2.location.longitude\n lat2 = res2.location.latitude\n\n kml = (\n \"<Placemark>\\n\"\n \"<name>%s connecting to %s</name>\\n\"\n \"<LineString>\\n\"\n \"<extrude>1</extrude>\\n\"\n \"<tessellate>1</tessellate>\\n\"\n \"<altitudeMode>relativeToGround</altitudeMode>\\n\"\n \"<coordinates>\\n\"\n \"%6f,%6f,50 %6f,%6f,50\\n\"\n \"</coordinates>\\n\"\n \"</LineString>\\n\"\n \"</Placemark>\\n\"\n ) % (ip1, ip2, long1, lat1, long2, lat2)\n\n return kml",
"def make_model_geocsv():\n\n model_file, base_file_name = check_netcdf_file()\n print('[INFO] Input netCDF File: {}'.format(model_file), flush=True)\n\n data_header = list()\n model_data = Dataset(model_file)\n try:\n # conversion to string is done to preserve precision\n lat = list()\n lon = list()\n depth = list()\n for this_value in model_data.variables[LAT_VARIABLE][:]:\n lat.append(\"{}\".format(str(this_value)))\n for this_value in model_data.variables[LON_VARIABLE][:]:\n lon.append(\"{}\".format(str(this_value)))\n for this_value in model_data.variables[DEPTH_VARIABLE][:]:\n depth.append(\"{}\".format(str(this_value)))\n except Exception:\n print('\\n[Error] the expected variables ({}, {}, {}) not in the variable list: {}\\n'.format(\n LAT_VARIABLE, LON_VARIABLE, DEPTH_VARIABLE, str(list(model_data.variables.keys()))))\n sys.exit(1)\n\n emcin = {}\n\n # make sure this is a 3D netCDF file\n var_3d = list()\n for var in model_data.variables.keys():\n if len(model_data.variables[var].shape) == 3:\n var_3d.append(var)\n if len(var_3d) <= 0:\n print('\\n[ERROR] not a 3D netCDF file\\n\\n', flush=True)\n sys.exit(1)\n\n # the standard order is (Z, Y, X) or (depth, latitude, longitude)\n if DEBUG:\n print('[INFO] Mode: {}'.format(OUTPUT_MODE), flush=True)\n print('[INFO] 3D Variables: {}'.format(var_3d), flush=True)\n\n if VIEW_HEADER:\n display_headers(model_file, model_data)\n sys.exit(0)\n\n output_data = list()\n for k, this_depth in enumerate(depth):\n if OUTPUT_MODE == 'single' and k == 0:\n data_header = list()\n output_file = '{}.csv'.format(base_file_name)\n fp = open(output_file, 'w')\n print('[INFO] Output file: {}'.format(output_file), flush=True)\n fp.write(get_model_header(model_file, model_data))\n data_header.append('{}{}{}{}{}'.format(LAT_VARIABLE, DELIMITER, LON_VARIABLE,\n DELIMITER, DEPTH_VARIABLE))\n elif OUTPUT_MODE == 'depth':\n output_data = list()\n data_header = list()\n output_file = os.path.join(\n '{}_{}_{}.csv'.format(base_file_name, this_depth, VALID_MODES[OUTPUT_MODE]))\n fp = open(output_file, 'w')\n print('[INFO] Output file: {}'.format(output_file), flush=True)\n fp.write(get_model_header(model_file, model_data))\n data_header.append('# depth: {}\\n'.format(this_depth))\n data_header.append('{}{}{}'.format(LAT_VARIABLE, DELIMITER, LON_VARIABLE))\n\n if DEBUG:\n print('[INFO] Processing depth: {}'.format(this_depth), flush=True)\n else:\n dot()\n\n index = [-1, -1, -1]\n for i, this_lat in enumerate(lat):\n for j, this_lon in enumerate(lon):\n if OUTPUT_MODE == 'single':\n output_data.append('{}{}{}{}{}'.format(str(this_lat), DELIMITER, str(this_lon), DELIMITER,\n str(this_depth)))\n else:\n output_data.append('{}{}{}'.format(str(this_lat), DELIMITER, str(this_lon)))\n\n for var in model_data.variables.keys():\n depth_index = None\n lat_index = None\n lon_index = None\n if var.encode('ascii', 'ignore').decode(\"utf-8\") not in [LAT_VARIABLE, LON_VARIABLE,\n DEPTH_VARIABLE]:\n if ((OUTPUT_MODE == 'single' and (not i and not j and not k)) or\n (OUTPUT_MODE == 'depth' and (not i and not j))):\n fp.write(get_var_header(model_data, var))\n data_header.append('{}{}'.format(DELIMITER, var))\n # find the variable ordering\n if lat_index is None:\n for l in range(len(model_data.variables[var].dimensions)):\n if model_data.variables[var].dimensions[l].encode('ascii', 'ignore').decode(\n \"utf-8\") == DEPTH_VARIABLE:\n depth_index = l\n elif model_data.variables[var].dimensions[l].encode('ascii', 'ignore').decode(\n \"utf-8\") == LON_VARIABLE:\n lon_index = l\n 
else:\n lat_index = l\n\n if var not in emcin.keys():\n try:\n emcin[var] = model_data.variables[var][:]\n except Exception as err:\n print('\\n[Error] problem reading variable \"{}\"'.format(var))\n print('{0}\\n'.format(err))\n sys.exit(2)\n\n index[depth_index] = k\n index[lat_index] = i\n index[lon_index] = j\n # write nan for masked values, otherwise a string to preserve the precision\n if str(emcin[var][index[0]][index[1]][index[2]]) == '--':\n output_data.append('{}{}'.format(DELIMITER, float('nan')))\n else:\n # conversion to string is done to preserve precision\n output_data.append('{}{}'.format(DELIMITER,\n str(emcin[var][index[0]][index[1]][index[2]])))\n output_data.append('\\n')\n if OUTPUT_MODE == 'depth':\n fp.write('{}\\n'.format(''.join(data_header)))\n fp.write(''.join(output_data))\n fp.close()\n\n if OUTPUT_MODE == 'single':\n fp.write('{}\\n'.format(''.join(data_header)))\n fp.write(''.join(output_data))\n fp.close()",
"def site2nrml(model, params_dict): \n \"\"\"\n # Some XML definitions\n NAMESPACE = 'http://openquake.org/xmlns/nrml/0.4'\n GML_NAMESPACE = 'http://www.opengis.net/gml'\n SERIALIZE_NS_MAP = {None: NAMESPACE, 'gml': GML_NAMESPACE} \n gml_ns = SERIALIZE_NS_MAP['gml']\n \"\"\"\n \n # Head matter \n root = etree.Element(_tag='nrml', nsmap={'gml': 'http://www.opengis.net/gml'})\n root.set('xmlns', 'http://openquake.org/xmlns/nrml/0.4')\n root.append(etree.Comment('%s' % '%s site model' %(model)))\n \n\n # Define Site Model Name \n sMod = etree.SubElement(root, \"siteModel\")\n sMod.set('name', model + ' Site Model')\n \n # Define sub element\n \n for key in params_dict:\n \n site = etree.SubElement(sMod, \"site\")\n site.set('lon', '%s' % key[0])\n site.set('lat', '%s' % key[1])\n site.set('vs30', '%s' % params_dict[key][0])\n site.set('vs30Type', '%s' % 'inferred')\n site.set('z1pt0', '%s' % '%3.3f' % float(params_dict[key][1]))\n site.set('z2pt5', '%s' % '%3.3f' % float(params_dict[key][2]))\n \n #print(getMinMax(params_dict))\n \n # Form tree and write to xml\n root_tree = etree.ElementTree(root)\n outFile = open((out_directory + '/' + out_filename), 'wb')\n root_tree.write(outFile, encoding=\"utf-8\", xml_declaration=True, pretty_print=True)",
"def __init__(self, gridname=None, verbose=False):\n self.gridname = gridname\n g = re.match(r'(EASE2_[NST])([0-9\\.]+)km', gridname)\n if g is None:\n print(\"%s : error parsing gridname %s\" % (__name__, gridname),\n file=sys.stderr,\n flush=True)\n raise ValueError\n projection = g.group(1)\n resolution = g.group(2)\n\n # Check for typos in resolution\n if resolution not in resolutions:\n print(\"%s : unrecognized resolution %s\" % (__name__, resolution),\n file=sys.stderr,\n flush=True)\n raise ValueError\n\n # The geotransform information\n # is the set of GDAL affine transform parameters:\n # (map_UL_x, scale_x, b, map_UL_y, d, scale_y)\n if projection == \"EASE2_N\":\n # The geotransform is the set of GDAL affine transform parameters:\n # (map_UL_x, scale_x, b, map_UL_y, d, scale_y)\n self.proj4text = \"+proj=laea +lat_0=90 +lon_0=0 \" + \\\n \"+x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m\"\n self.map_UL_x = -9000000.\n self.map_UL_y = 9000000.\n self.b = 0.\n self.d = 0.\n self.scale_x = float(resolution) * m_per_km\n self.scale_y = -1 * float(resolution) * m_per_km\n\n elif projection == \"EASE2_S\":\n self.proj4text = \"+proj=laea +lat_0=-90 +lon_0=0 \" + \\\n \"+x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m\"\n self.map_UL_x = -9000000.\n self.map_UL_y = 9000000.\n self.b = 0.\n self.d = 0.\n self.scale_x = float(resolution) * m_per_km\n self.scale_y = -1 * float(resolution) * m_per_km\n\n elif projection == \"EASE2_T\":\n self.proj4text = \"+proj=cea +lat_0=0 +lon_0=0 +lat_ts=30 \" \\\n \"+x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m\"\n self.map_UL_x = -17367530.44\n self.map_UL_y = 6756820.20000\n self.b = 0.\n self.d = 0.\n base_resolution_m = 25025.26000\n factor = resolutions.index(resolution)\n self.scale_x = base_resolution_m / (2. ** factor)\n self.scale_y = -1 * base_resolution_m / (2. ** factor)\n\n else:\n print(\"%s : unrecognized projection %s\" % (__name__, projection),\n file=sys.stderr,\n flush=True)\n raise ValueError\n\n # Thanks to affine help pages at\n # https://github.com/sgillies/affine/blob/master/README.rst\n # http://www.perrygeo.com/python-affine-transforms.html\n geotransform = (self.map_UL_x + self.scale_x / 2.,\n self.scale_x,\n self.b,\n self.map_UL_y + self.scale_y / 2.,\n self.d,\n self.scale_y)\n self.fwd = Affine.from_gdal(*geotransform)\n\n # Initialize and save coordinate transformation\n # for this projection\n self.gridSpatialRef = osr.SpatialReference()\n self.gridSpatialRef.SetFromUserInput(self.proj4text)\n\n # Initialize and save coordinate transformation\n # for EPSG4326 (lat/lon)\n self.epsg4326SpatialRef = osr.SpatialReference()\n self.epsg4326SpatialRef.SetFromUserInput(self.epsg4326Proj4text)\n\n # Initialize and save the forward and reverse transformations\n self.projToGeog = osr.CoordinateTransformation(\n self.gridSpatialRef, self.epsg4326SpatialRef)\n self.geogToProj = osr.CoordinateTransformation(\n self.epsg4326SpatialRef, self.gridSpatialRef)\n\n if verbose:\n print(\"%s : initialized new Ease2Transform object\" % (__name__),\n file=sys.stderr,\n flush=True)",
"def generate_lookat_kml_block(self, lng, lat, viewrange):\n return \"\"\"\n <LookAt>\n <longitude>%.14f</longitude>\n <latitude>%.14f</latitude>\n <altitude>0</altitude>\n <range>%.f</range>\n <tilt>0</tilt>\n <heading>0</heading>\n </LookAt>\n\"\"\" % (lng, lat, viewrange)",
"def box2kml(xy,fname=None,name='box',color='FF0000',width=3,verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Box: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_region(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def convert_kcalmol_kJmol(en_kcalmol):\n return en_kcalmol/kJmol_kcalmol",
"def GEEnasaNEXGDDP(ptsFile,metric,timeStep,startYear,endYear,scenarios,buf,poly,username,folderOut,models = ['ACCESS1-0', 'bcc-csm1-1', 'BNU-ESM',\n 'CanESM2', 'CCSM4', 'CESM1-BGC', 'CNRM-CM5', 'CSIRO-Mk3-6-0',\n 'GFDL-CM3', 'GFDL-ESM2G', 'GFDL-ESM2M', 'inmcm4', 'IPSL-CM5A-LR',\n 'IPSL-CM5A-MR', 'MIROC-ESM', 'MIROC-ESM-CHEM', 'MIROC5', 'MPI-ESM-LR',\n 'MPI-ESM-MR', 'MRI-CGCM3', 'NorESM1-M'], scalePix = 25000):\n \n # load required libraries\n import ee\n\n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n time_d = {}\n time_d['day'] = 'projd'\n time_d['month'] = 'projm'\n time_d['year'] = 'projy'\n \n for met in metric:\n\n for scenario in scenarios:\n\n for model in models:\n\n NEX = (ee.ImageCollection('NASA/NEX-GDDP')\n .select(met)\n .filterMetadata('model', 'equals', model)\n .filterMetadata('scenario', 'equals', scenario))\n\n metL = [met]\n \n years = list(range(startYear, endYear + 1))\n monthsEE = ee.List(list(range(0,(12*len(years)))))\n yearsEE = ee.List(years)\n\n######Turned off unit conversion, because it fails when there are too many pts\n## if (met == 'pr'):\n##\n## def Scale1(img):\n## return (img.float()\n## .multiply(86400)\n## .copyProperties(img,['system:time_start','system:time_end']))\n##\n## NEX = NEX0.map(Scale1)\n## \n## elif any([(met == 'tasmin'),(met == 'tasmax')]):\n##\n## def KtoC(img):\n## return (img.float()\n## .subtract(273.15)\n## .copyProperties(img,['system:time_start','system:time_end']))\n##\n## NEX = NEX0.map(KtoC)\n \n if all([(timeStep == 'year'),any([(met == 'tasmin'),(met == 'tasmax')])]):\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (NEX\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (NEX\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n elif all([(timeStep == 'month'),any([(met == 'tasmin'),(met == 'tasmax')])]):\n \n def map_m(i):\n i = ee.Number(i)\n y = i.divide(12).add(years[0]).int()\n m = i.mod(12).add(1)\n image2 = (NEX\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .first())\n filtered = (NEX\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(monthsEE.map(map_m).flatten())\n\n elif all([(timeStep == 'year'),(met == 'pr')]):\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (NEX\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (NEX\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .sum()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n elif all([(timeStep == 'month'),(met == 'pr')]):\n \n def map_m(i):\n i = ee.Number(i)\n y = i.divide(12).add(years[0]).int()\n m = i.mod(12).add(1)\n image2 = (NEX\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .first())\n filtered = (NEX\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .sum()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n 
img_col = ee.ImageCollection(monthsEE.map(map_m).flatten())\n\n elif timeStep == 'day':\n\n img_col = NEX.filter(ee.Filter.calendarRange(startYear, endYear, 'year'))\n\n #else:\n #print(\"incorrect time step specified\")\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_NEX_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('buffered pts by:' + str(buf) + ' for NEX: ' + met + ' ' + scenario + ' ' + model)\n\n elif poly > 0:\n \n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_NEX_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('spatial mean in poly: no buffer for NEX: ' + met + ' ' + scenario + ' ' + model)\n\n else:\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_NEX_'+str(met)+'_'+scenario+'_'+model+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print('value at point: no buffer for NEX: ' + met + ' ' + scenario + ' ' + model)",
"def generate_leaf_kml(self, d, content=\"\"):\n return (\"\"\"\\\n <Folder>\n <Region>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n </Region>\n <GroundOverlay>\n <drawOrder>%(draw_order)d</drawOrder>\n <Icon>\n <href>%(image_url)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\"\"\" % d\n + \"\"\"\\\n%s\n </Folder>\"\"\" % content)",
"def read_smx_fmv_12(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = eps_file.mdr_counter * n_node_per_line\n idx_nodes = np.arange(eps_file.mdr_counter).repeat(n_node_per_line)\n\n data = {}\n metadata = {}\n\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"f_land\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n fields = [\"sat_track_azi\", \"abs_line_number\"]\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan, long_nan),\n (\"latitude\", long_nan, long_nan),\n (\"swath_indicator\", byte_nan, byte_nan),\n (\"soil_moisture\", uint_nan, uint_nan),\n (\"soil_moisture_error\", uint_nan, uint_nan),\n (\"sigma40\", long_nan, long_nan),\n (\"sigma40_error\", long_nan, long_nan),\n (\"slope40\", long_nan, long_nan),\n (\"slope40_error\", long_nan, long_nan),\n (\"dry_backscatter\", long_nan, long_nan),\n (\"wet_backscatter\", long_nan, long_nan),\n (\"mean_surf_soil_moisture\", uint_nan, uint_nan),\n (\"soil_moisture_sensetivity\", ulong_nan, float32_nan),\n (\"correction_flags\", uint8_nan, uint8_nan),\n (\"processing_flags\", uint8_nan, uint8_nan),\n (\"aggregated_quality_flag\", uint8_nan, uint8_nan),\n (\"snow_cover_probability\", uint8_nan, uint8_nan),\n (\"frozen_soil_probability\", uint8_nan, uint8_nan),\n (\"innudation_or_wetland\", uint8_nan, uint8_nan),\n (\"topographical_complexity\", uint8_nan, uint8_nan)]\n\n for f, nan_val, new_nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = new_nan_val\n\n # sat_track_azi (uint)\n data[\"as_des_pass\"] = \\\n np.array(raw_data[\"SAT_TRACK_AZI\"].flatten()[idx_nodes] < 270)\n\n # modify longitudes from [0,360] to [-180,180]\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n fields = [\"param_db_version\", \"warp_nrt_version\"]\n for f in fields:\n data[f] = raw_data[\"PARAM_DB_VERSION\"].flatten()[idx_nodes]\n\n metadata[\"spacecraft_id\"] = int(eps_file.mphr[\"SPACECRAFT_ID\"][2])\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1), n_lines)\n\n data[\"line_num\"] = idx_nodes\n\n return data, metadata",
"def saveKML(kmlFile):\n\n tilePath = os.path.basename('map-NYC_heatmap.png')\n north = topLeftLat\n south = bottomRightLat\n east = topLeftLon\n west = bottomRightLon\n \n bytes = KML % (tilePath, north, south, east, west)\n file(kmlFile, \"w\").write(bytes)",
"def wkt(self): # -> str:\n ...",
"def convertIdToName(mylegend, shapelayer):\n # Check for editing rights (capabilities)\n caps = shapelayer.dataProvider().capabilities()\n\n #Create field for storing Landuse label (String)\n newColumn (shapelayer,\"Landuse\", QVariant.String)\n\n print(\"Starting iterating over Features\")\n features = shapelayer.getFeatures()\n # Get field ID of landuse nr\n luINTFieldID = shapelayer.fields().indexFromName(\"LUNrInt\")\n # Initiate a variable to hold the attribute values\n updates = {}\n i = 0\n # iterate over features\n for feat in features:\n luNameFieldID = shapelayer.fields().indexFromName(\"Landuse\")\n intLU = feat[luINTFieldID]\n stringLU = \"NOT FOUND\"\n for row in mylegend:\n luID = row[0]\n #print(\"{} is a string! And will be converted to :{}\".format(luID, int(luID)))\n if(intLU==int(luID)):\n stringLU = row[1]\n break\n #print(\"YUHUU! FOUND: {}\".format(row[1]))\n updates[feat.id()] = {luNameFieldID: stringLU}\n # Use the created dictionary to update the field for all features\n shapelayer.dataProvider().changeAttributeValues(updates)\n # Update to propagate the changes\n shapelayer.updateFields()",
"def __load_topography__(filepath):\n\tfrom clawpack.geoclaw import topotools\n\ttopo = topotools.Topography(filepath)\n\t\n\tif TESTING:\n\t\timport matplotlib.pyplot as plt\n\t\ttopo.plot()\n\t\tplt.show()\n\ttopo.topo_type = 3\n\txgrid = topo.X\n\tygrid = topo.Y\n\tzgrid = topo.Z\n\t\n\t#temp; find a better solution (e.g. convert from lat/lon to actual space)\n\t#xgrid = 1.e4 * xgrid\n\t#ygrid = 1.e4 * ygrid\n\t\n\t#test only\n\tshape = zgrid.shape\n\tny, nx = shape[0], shape[1]\n\t#for iy in range(0,ny):\n\t\t#zgrid[iy, 0] = zgrid[iy,0]+1e4\n\t#for ix in range(0,nx):\n\t\t#zgrid[1, ix] = zgrid[1,ix]-1e4\n\t\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\twavyz = wavy(xgrid, ygrid)\n\t\n\t\n\tfor ix in range(0,0):\n\t\tfor iy in range(0,0):\n\t\t\tzgrid[iy, ix] = 1e4*wavyz[iy, ix]\n\t\n\tzgrid = 1e-4 * zgrid\n\t\n\treturn (xgrid, ygrid, zgrid)",
"def GEEsmos(ptsFile,metric,timeStep,buf,poly,username,folderOut, scalePix = 25000,startYear = None,endYear = None):\n \n # load required libraries\n import ee\n \n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n time_d = {}\n time_d['lowest'] = 'rl'\n time_d['month'] = 'rm'\n time_d['year'] = 'ry'\n\n lastImage = ee.Image(ee.ImageCollection('NASA_USDA/HSL/soil_moisture')\n .sort('system:time_start',False)\n .first())\n lastImageDate = lastImage.get('system:index').getInfo()\n\n firstImage = ee.Image(ee.ImageCollection('NASA_USDA/HSL/soil_moisture')\n .sort('system:time_start',True)\n .first())\n firstImageDate = firstImage.get('system:index').getInfo()\n \n #startMonth - 1, because time-series starts on Jan 1\n #startYearAll: did't add one, for same reason\n if all([startYear is None,endYear is None]):\n startYear = int(firstImageDate[(len(firstImageDate)-8):(len(firstImageDate)-4)])\n endYear = int(lastImageDate[(len(lastImageDate)-8):(len(lastImageDate)-4)])\n startMonth = int(firstImageDate[(len(firstImageDate)-4):(len(firstImageDate)-2)])-1\n endMonth = int(lastImageDate[(len(lastImageDate)-4):(len(lastImageDate)-2)])-1\n startYearAll = startYear\n endYearAll = endYear - 1\n \n years = list(range(startYear, endYearAll + 1))\n monthsEE = ee.List(list(range(startMonth,(12*len(years)+endMonth))))\n yearsEE = ee.List(list(range(startYearAll, endYearAll + 1)))\n \n elif all([startYear >= 0,endYear >= 0]):\n startYearReal = int(firstImageDate[(len(firstImageDate)-8):(len(firstImageDate)-4)])\n endYearReal = int(lastImageDate[(len(lastImageDate)-8):(len(lastImageDate)-4)]) \n \n years = list(range(max(startYearReal,startYear), (min(endYearReal,endYear) + 1)))\n \n if endYear >= endYearReal:\n endMonth = int(lastImageDate[(len(lastImageDate)-4):(len(lastImageDate)-2)])-1\n endYearReal2 = endYearReal-1\n years2 = len(years)-1\n elif endYear < endYearReal:\n endMonth = 0\n endYearReal2 = endYearReal\n years2 = len(years)\n \n if startYear <= startYearReal:\n startMonth = int(firstImageDate[(len(firstImageDate)-4):(len(firstImageDate)-2)])-1\n elif startYear > startYearReal:\n startMonth = 0\n \n monthsEE = ee.List(list(range(startMonth,(12*years2+endMonth))))\n yearsEE = ee.List(list(range(max(startYearReal,startYear), (min(endYearReal2,endYear) + 1))))\n \n for met in metric:\n SMOS = ee.ImageCollection('NASA_USDA/HSL/soil_moisture').select(met)\n metL = [met]\n \n if timeStep == 'year':\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (SMOS\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (SMOS\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n elif timeStep == 'month':\n \n def map_m(i):\n i = ee.Number(i)\n y = i.divide(12).add(years[0]).int()\n m = i.mod(12).add(1)\n image2 = (SMOS\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .first())\n filtered = (SMOS\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(monthsEE.map(map_m).flatten())\n\n elif all([timeStep == 'lowest',endYear is None, startYear is 
None]):\n\n img_col = SMOS\n \n elif all([timeStep == 'lowest',endYear > 0, startYear > 0]):\n\n img_col = SMOS.filter(ee.Filter.calendarRange(startYear, endYear, 'year'))\n\n #else:\n #print(\"incorrect time step specified\")\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_SMOS_'+str(met)+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('buffered pts by:' + str(buf) + ' for SMOS: ' + met)\n\n elif poly > 0:\n \n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_SMOS_'+str(met)+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('spatial mean in poly: no buffer for SMOS: ' + met)\n\n else:\n def table_m(image):\n table = (image\n .select(metL[0])\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_SMOS_'+str(met)+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print('value at point: no buffer for SMOS: ' + met)",
"def data_airline():\n return load_airline()",
"def Infomap(pajek_string, *args, **kwargs):\n \n def _default_to_regular(d):\n \"\"\"Recursively convert nested defaultdicts to nested dicts.\n \"\"\"\n if isinstance(d, defaultdict):\n d = {k: _default_to_regular(v) for k, v in d.items()}\n return d\n \n def _get_id_to_label(filename):\n def __int_if_int(val):\n try: return int(val)\n except ValueError: return val\n with open('/tmp/input_infomap/' + filename + \".net\", 'r') as fp:\n parsed_network = fp.read()\n return dict(\n (int(n.split()[0]), __int_if_int(n.split('\"')[1]))\n for n in re.split(r\"\\*.+\", parsed_network)[1].split(\"\\n\")[1:-1]\n )\n \n def multilayer(id_to_label, filename):\n with open('/tmp/output_infomap/'+filename+\"_expanded.clu\", 'r') as infile:\n clusters = infile.read()\n\n # Get layers, nodes and clusters from _extended.clu file\n la_no_clu_flow = re.findall(r'\\d+ \\d+ \\d+ \\d.*\\d*', clusters) # [\"30 1 2 0.00800543\",...]\n la_no_clu_flow = [tuple(i.split()) for i in la_no_clu_flow]\n\n layer_node_flow_json = defaultdict(float) # {layer_node: flow, ...}\n node_flow_json = defaultdict(float) # {node: flow, ...}\n community_flow_json = defaultdict(float) # {community: flow, ...}\n communities_json = defaultdict(set) # {layer: {(node, cluster), ...}, ...}\n for layer, node, cluster, flow in la_no_clu_flow:\n layer_node_flow_json[\"%s_%s\" % (layer, id_to_label[int(node)])] += float(flow)\n node_flow_json[\"%s\" % (id_to_label[int(node)])] += float(flow)\n community_flow_json[cluster] += float(flow)\n communities_json[int(layer)].add((id_to_label[int(node)], int(cluster)))\n\n return communities_json, layer_node_flow_json, node_flow_json, community_flow_json\n \n def _parse_communities_planar(id_to_label, filename):\n with open('/tmp/output_infomap/'+filename+\".clu\", 'r') as infile:\n clusters = infile.read()\n \n # Get nodes and clusters from .clu file\n no_clu = [tuple(i.split()[:-1]) for i in re.findall(r\"\\d+ \\d+ \\d.*\\d*\", clusters)] # [(node, cluster), ...]\n return {0: set([(id_to_label[int(no)], int(clu)) for no, clu in no_clu])}\n \n def _clean_up(filename):\n subprocess.call(['rm', '/tmp/input_infomap/' + filename + '.net'])\n subprocess.call(['rm', '/tmp/output_infomap/' + filename + '_expanded.clu'])\n subprocess.call(['rm', '/tmp/output_infomap/' + filename + '.clu'])\n \n # Check for process id in args (for multiprocessing)\n if args[-1][:3] == \"pid\":\n pid = args[-1][3:]\n args = args[:-1]\n else:\n pid = \"\"\n\n # Try to make input_infomap and output_infomap folders in /tmp\n subprocess.call(['mkdir', '/tmp/input_infomap', '/tmp/output_infomap'])\n \n \n # Get network in multilayer string format and define filename\n filename = 'tmpnet' + pid\n\n # Store locally\n with open(\"/tmp/input_infomap/\"+filename+\".net\", 'w') as outfile:\n outfile.write(pajek_string)\n \n # Run Infomap for multilayer network\n subprocess.call(\n ['Infomap', '/tmp/input_infomap/'+filename+\".net\", '/tmp/output_infomap'] + \\\n list(args)\n )\n \n # Parse communities from Infomap output_infomap\n id_to_label = _get_id_to_label(filename)\n \n if 'multilayer' in list(args):\n parsed_communities, layer_node_flow, node_flow, community_flow = multilayer(id_to_label, filename)\n if 'pajek' in list(args):\n parsed_communities = _parse_communities_planar(id_to_label, filename)\n \n _clean_up(filename)\n\n # Produce layer communities\n layer_communities = {}\n for layer, group in list(parsed_communities.items()):\n communities = {}\n for no, clu in group: \n try:\n communities[clu-1].append(no)\n except 
KeyError:\n communities[clu-1] = [no]\n layer_communities[layer] = communities\n \n # Produce community_members\n community_members = defaultdict(Counter)\n for _, communities in list(layer_communities.items()):\n for c, members in list(communities.items()):\n community_members[c].update(members)\n\n return [\n _default_to_regular(community_members),\n layer_communities,\n _default_to_regular(layer_node_flow),\n _default_to_regular(node_flow),\n _default_to_regular(community_flow)\n ]",
"def make_e3sm_to_cmip_maps(config, logger, mesh_short_name, creation_date,\n ntasks):\n\n link_dir = '../assembled_files/diagnostics/maps'\n\n try:\n os.makedirs(link_dir)\n except FileExistsError:\n pass\n\n src_scrip_filename = 'ocean.scrip.nc'\n cmip6_grid_res = config.get('files_for_e3sm', 'cmip6_grid_res')\n if cmip6_grid_res == '180x360':\n dst_scrip_filename = 'cmip6_180x360_scrip.20181001.nc'\n elif cmip6_grid_res == '720x1440':\n dst_scrip_filename = 'cmip6_720x1440_scrip.20181001.nc'\n else:\n raise ValueError(f'Unexpected cmip6_grid_res: {cmip6_grid_res}')\n\n parallel_executable = config.get('parallel', 'parallel_executable')\n # split the parallel executable into constituents in case it includes flags\n parallel_command = parallel_executable.split(' ')\n parallel_system = config.get('parallel', 'system')\n if parallel_system == 'slurm':\n parallel_command.extend(['-n', f'{ntasks}'])\n elif parallel_system == 'single_node':\n if ntasks > 1:\n parallel_command.extend(['-n', f'{ntasks}'])\n else:\n raise ValueError(f'Unexpected parallel system: {parallel_system}')\n parallel_command = ' '.join(parallel_command)\n\n map_methods = dict(aave='conserve', mono='fv2fv_flx', nco='nco')\n for suffix, map_method in map_methods.items():\n local_map_filename = f'map_mpas_to_cmip6_{suffix}.nc'\n args = ['ncremap', f'--mpi_pfx={parallel_command}',\n f'--alg_typ={map_method}',\n f'--grd_src={src_scrip_filename}',\n f'--grd_dst={dst_scrip_filename}',\n f'--map={local_map_filename}']\n check_call(args, logger=logger)\n\n map_filename = \\\n f'map_{mesh_short_name}_to_cmip6_{cmip6_grid_res}_{suffix}.{creation_date}.nc' # noqa: E501\n\n symlink(os.path.abspath(local_map_filename),\n f'{link_dir}/{map_filename}')",
"def fix_greek_in_mathml(self, xml):\r\n def gettag(expr):\r\n return re.sub('{http://[^}]+}', '', expr.tag)\r\n\r\n for k in xml:\r\n tag = gettag(k)\r\n if tag == 'mi' or tag == 'ci':\r\n usym = unicode(k.text)\r\n try:\r\n udata = unicodedata.name(usym)\r\n except Exception:\r\n udata = None\r\n # print \"usym = %s, udata=%s\" % (usym,udata)\r\n if udata:\t\t\t# eg \"GREEK SMALL LETTER BETA\"\r\n if 'GREEK' in udata:\r\n usym = udata.split(' ')[-1]\r\n if 'SMALL' in udata:\r\n usym = usym.lower()\r\n #print \"greek: \",usym\r\n k.text = usym\r\n self.fix_greek_in_mathml(k)\r\n return xml",
"def make_ARI_list(dx, dy, m_info, offset):\n \"\"\"\n 1 Get information from m_info.\n \"\"\"\n x_m = m_info[0]\n y_m = m_info[1]\n z_m = m_info[2]\n\n m_points = m_info[3]\n\n m_p0 = m_points[0]\n m_p1 = m_points[1]\n m_p2 = m_points[2]\n m_p3 = m_points[3]\n\n \"\"\"\n 2 Get points of ARI.\n \"\"\"\n x_k = y_m * 2 / 3 # NOTE: fixed number\n\n # KUMIKI_points_left reflect offset\n p5 = (dx, dy)\n p4 = (dx, dy + y_m / 3 - offset)\n p3 = (dx + x_k, dy + y_m / 4 - offset)\n p2 = (dx + x_k, dy + 3 * y_m / 4 + offset)\n p1 = (dx, dy + 2 * y_m / 3 + offset)\n p0 = (dx, dy + y_m)\n\n KUMIKI_points_left = [p0, p1, p2, p3, p4, p5]\n\n # KUMIKI_points_right not reflect offset\n p5 = (dx, dy)\n p4 = (dx, dy + y_m / 3)\n p3 = (dx + x_k, dy + y_m / 4)\n p2 = (dx + x_k, dy + 3 * y_m / 4)\n p1 = (dx, dy + 2 * y_m / 3)\n p0 = (dx, dy + y_m)\n\n KUMIKI_points_right = [p0, p1, p2, p3, p4, p5]\n\n \"\"\"\n 3 Get SEN information.\n \"\"\"\n SEN_info = get_m2_m3_SEN_info(dx, dy, m_info, x_k)\n\n # upper shape\n upper_shape_left, upper_shape_right =\\\n m2_m3_make_upper_shape_points_list(dx, dy, m_info, SEN_info)\n\n upper_shape_left_upper_row = upper_shape_left[0]\n upper_shape_left_lower_row = upper_shape_left[1]\n\n upper_shape_right_upper_row = upper_shape_right[0]\n upper_shape_right_lower_row = upper_shape_right[1]\n\n # lower shape\n lower_shape_left, lower_shape_right =\\\n m2_m3_make_lower_shape_points_list(dx, dy, m_info, SEN_info)\n\n lower_shape_left_upper_row = lower_shape_left[0]\n lower_shape_left_lower_row = lower_shape_left[1]\n\n lower_shape_right_upper_row = lower_shape_right[0]\n lower_shape_right_lower_row = lower_shape_right[1]\n\n # middle shape\n middle_shape_left, middle_shape_right =\\\n m2_m3_make_middle_shape_points_list(dx, dy, m_info, SEN_info)\n\n middle_shape_left_upper_row = middle_shape_left[0]\n middle_shape_left_lower_row = middle_shape_left[1]\n\n middle_shape_right_upper_row = middle_shape_right[0]\n middle_shape_right_lower_row = middle_shape_right[1]\n\n \"\"\"\n 4 Make ARI lists\n \"\"\"\n # Leftside\n # Upper\n left_upper = []\n left_upper.append(m_p1)\n left_upper.extend(upper_shape_left_upper_row)\n\n left_upper.extend(KUMIKI_points_left)\n left_upper.extend(upper_shape_left_lower_row)\n left_upper.append(m_p0)\n\n # left_upper_crv = rs.AddPolyline(left_upper)\n\n # Middle\n left_middle = []\n left_middle.append(m_p1)\n left_middle.extend(middle_shape_left_upper_row)\n\n left_middle.extend(KUMIKI_points_left)\n left_middle.extend(middle_shape_left_lower_row)\n left_middle.append(m_p0)\n\n # left_middle_crv = rs.AddPolyline(left_middle)\n\n # Lower\n left_lower = []\n left_lower.append(m_p1)\n left_lower.extend(lower_shape_left_upper_row)\n\n left_lower.extend(KUMIKI_points_left)\n left_lower.extend(lower_shape_left_lower_row)\n left_lower.append(m_p0)\n\n # left_lower_crv = rs.AddPolyline(left_lower)\n\n # left_crvs = [left_upper_crv, left_middle_crv, left_lower_crv]\n\n left_list = [left_upper, left_middle, left_lower]\n\n # Rightside\n # Upper\n right_upper = []\n right_upper.append(m_p2)\n right_upper.extend(upper_shape_right_upper_row)\n\n right_upper.extend(KUMIKI_points_right)\n right_upper.extend(upper_shape_right_lower_row)\n right_upper.append(m_p3)\n\n # right_upper_crv = rs.AddPolyline(right_upper)\n\n # Middle\n right_middle = []\n right_middle.append(m_p2)\n right_middle.extend(middle_shape_right_upper_row)\n\n right_middle.extend(KUMIKI_points_right)\n right_middle.extend(middle_shape_right_lower_row)\n right_middle.append(m_p3)\n\n # right_middle_crv 
= rs.AddPolyline(right_middle)\n\n # Lower\n right_lower = []\n right_lower.append(m_p2)\n right_lower.extend(lower_shape_right_upper_row)\n\n right_lower.extend(KUMIKI_points_right)\n right_lower.extend(lower_shape_right_lower_row)\n right_lower.append(m_p3)\n\n # right_lower_crv = rs.AddPolyline(right_lower)\n\n # right_crvs = [right_upper_crv, right_middle_crv, right_lower_crv]\n\n right_list = [right_upper, right_middle, right_lower]\n\n return left_list, right_list, SEN_info",
"def produce_output_txt(self):\n\n NAME = \"TODO get name form cpacs object\"\n\n result_dir = get_results_directory(\"WeightConventional\")\n\n output_file = Path(result_dir, \"Aircraft_Geometry.out\")\n\n OutputTextFile = open(output_file, \"w\")\n\n OutputTextFile.write(\"\\n#################################################\")\n OutputTextFile.write(\"\\n###### AIRCRAFT GEOMETRY EVALUATION MODULE ######\")\n OutputTextFile.write(\"\\n###### OUTPUTS ######\")\n OutputTextFile.write(\"\\n#################################################\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nAircraft: \" + NAME)\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nGeometry Evaluations-----------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nUSEFUL INFO -------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\n \"\\nIf fuselage or wing number is greater than 1 the\\n\"\n \"information of each obj are listed in an \"\n \"array ordered\\nprogressively\"\n )\n OutputTextFile.write(\n \"\\nSymmetry output: 0 = no symmetry, 1 = x-y,\\n\" + \"2 = x-z, 3 = y-z planes\"\n )\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nRESULTS -----------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nFUSELAGE ----------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(f\"\\nNumber of fuselage sections [-]: {self.fuse_sec_nb}\")\n OutputTextFile.write(f\"\\nNumber of fuselage segments [-]: {self.fuse_seg_nb}\")\n OutputTextFile.write(f\"\\nCabin segments array [-]: {self.cabin_seg}\")\n OutputTextFile.write(f\"\\nFuse Length [m]: {np.around(self.fuse_length, 5)}\")\n OutputTextFile.write(f\"\\nFuse nose Length [m]: {np.around(self.fuse_nose_length, 5)}\")\n OutputTextFile.write(f\"\\nFuse cabin Length [m]: {np.around(self.fuse_cabin_length, 5)}\")\n OutputTextFile.write(f\"\\nFuse tail Length [m]: {np.around(self.fuse_tail_length, 5)}\")\n OutputTextFile.write(f\"\\nAircraft Length [m]: {np.around(self.tot_length, 5)}\")\n OutputTextFile.write(\n \"\\nCircumference of each section of the fuselage [m]:\"\n f\"\\n{np.around(self.fuse_sec_circ, 5)}\"\n )\n OutputTextFile.write(\n \"\\nRelative distance of each section of the\"\n + \"fuselage, respect to the first one [m]: \\n\"\n + str(np.around(self.fuse_sec_rel_dist, 5))\n )\n OutputTextFile.write(\n \"\\nLength of each segment of the fuselage [m]: \\n\"\n + str(np.around(self.fuse_seg_length, 5))\n )\n OutputTextFile.write(\n \"\\nMean fuselage width [m]: \" + str(np.around(self.fuse_mean_width, 5))\n )\n OutputTextFile.write(\n \"\\nWidth of each section of the fuselage [m]: \\n\"\n + str(np.around(self.fuse_sec_width, 5))\n )\n OutputTextFile.write(\n \"\\nVolume of each segment of the fuselage \"\n \"[m^3]: \\n\" + str(np.around(self.fuse_seg_vol, 5))\n )\n OutputTextFile.write(\n \"\\nVolume of the cabin [m^3]: \" + str(np.around(self.fuse_cabin_vol, 5))\n )\n OutputTextFile.write(\"\\nVolume of the fuselage [m^3]: \" + str(np.around(self.fuse_vol, 5)))\n 
OutputTextFile.write(\"\\n\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nWINGS -------------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(f\"\\nNumber of Wings [-]: {self.wing_nb}\")\n OutputTextFile.write(f\"\\nWing symmetry plane [-]: {self.wing_sym}\")\n OutputTextFile.write(f\"\\nNumber of wing sections [-]: {self.wing_sec_nb}\")\n OutputTextFile.write(f\"\\nNumber of wing segments [-]: {self.wing_seg_nb}\")\n OutputTextFile.write(f\"\\nWing Span [m]: \\n{np.around(self.wing_span, 5)}\")\n OutputTextFile.write(\n \"\\nWing MAC length [m]: \\n\"\n + str(\n np.around(\n self.wing_mac[\n 0,\n ],\n 5,\n )\n )\n )\n OutputTextFile.write(\n \"\\nWing MAC x,y,z coordinate [m]: \\n\"\n + str(\n np.around(\n self.wing_mac[\n 1:4,\n ],\n 5,\n )\n )\n )\n OutputTextFile.write(\n \"\\nWings sections thickness [m]: \\n\" + str(np.around(self.wing_sec_thickness, 5))\n )\n OutputTextFile.write(\n \"\\nWings sections mean thickness [m]: \\n\" + str(np.around(self.wing_sec_mean_thick, 5))\n )\n OutputTextFile.write(\n \"\\nWing segments length [m]: \\n\" + str(np.around(self.wing_seg_length, 5))\n )\n OutputTextFile.write(\n \"\\nWing max chord length [m]: \\n\" + str(np.around(self.wing_max_chord, 5))\n )\n OutputTextFile.write(\n \"\\nWing min chord length [m]: \\n\" + str(np.around(self.wing_min_chord, 5))\n )\n OutputTextFile.write(\n \"\\nWings planform area [m^2]: \\n\" + str(np.around(self.wing_plt_area, 5))\n )\n OutputTextFile.write(\n \"\\nMain wing planform area [m^2]: \" + str(np.around(self.wing_plt_area_main, 5))\n )\n OutputTextFile.write(\"\\nVolume of each wing [m^3]: \\n\" + str(np.around(self.wing_vol, 5)))\n OutputTextFile.write(\"\\nTotal wing volume [m^3]: \" + str(np.around(self.wing_tot_vol, 5)))\n OutputTextFile.write(\"\\nWing volume for fuel storage [m^3]: \" + str(self.wing_fuel_vol))\n\n # Close Text File\n OutputTextFile.close()",
"def geocube():",
"def rebuild_models(self):\n\n zmin = self._domain.z.lbound\n zmax = self._domain.z.rbound\n\n xmin = self._domain.x.lbound\n xmax = self._domain.x.rbound\n\n grid = self.mesh.mesh_coords()\n\n # the small number is added to prevent undesireable numerical effects\n air_depth = (1e-8 + 2.0/15.0) * (zmax - zmin) + zmin\n rock_bottom = 13.0/15.0 * (zmax - zmin) + zmin\n\n coast_left = 3.0/25.0 * (xmax - xmin) + xmin\n coast_right = 13.0/25.0 * (xmax - xmin) + xmin\n\n max_depth = zmax\n\n # Set up air layer\n if self._domain.dim == 2:\n n = (0., 1.)\n p = (coast_right, air_depth)\n else: # domain.dim == 3\n n = (0.0, 0.0, 1.0)\n p = (coast_right, coast_right, air_depth)\n\n air_plane = ImplicitPlane(p,n)\n air = air_plane\n\n # Set up rock layer\n if self._domain.dim == 2:\n n = (coast_right - coast_left, -(1.0 - air_depth))\n p = (coast_right, max_depth)\n n2 = (0., -1.)\n p2 = (0., rock_bottom)\n else: # domain.dim == 3\n n = (coast_right - coast_left, 0.0, -(1.0 - air_depth))\n p = (coast_right, 0.0, max_depth)\n n2 = (0., 0., -1.)\n p2 = (0., 0., rock_bottom)\n\n rock_plane = ImplicitPlane(p,n)\n rock_plane2 = ImplicitPlane(p2,n2)\n\n rock = ImplicitDifference(ImplicitUnion(rock_plane, rock_plane2), air_plane)\n\n C0 = air.interior(grid, True) * self.air_velocity + \\\n rock.interior(grid, True) * self.rock_velocity\n\n C0[np.where(C0 == 0.0)] = self.water_velocity\n\n submarine = self.submarine\n\n if submarine is not None:\n sub = submarine.implicit_surface\n\n C = air.interior(grid, True) * self.air_velocity + \\\n rock.interior(grid, True) * self.rock_velocity + \\\n sub.interior(grid, True) * submarine.velocity\n\n C[np.where(C == 0.0)] = self.water_velocity\n\n else:\n C = C0.copy()\n\n C.shape = self._mesh.shape()\n C0.shape = self._mesh.shape()\n\n self._true_model = C\n self._initial_model = C0",
"def generate_link_kml(self, d):\n return \"\"\"\\\n <NetworkLink>\n <name>%(image_filename)s</name>\n <Region>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n </Region>\n <Link>\n <href>%(link_url)s</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n </Link>\n </NetworkLink>\"\"\" % d",
"def map2mw_Aug(d,k1,entry):\n L = entry.metad['L']\n if L in ['7201','7202']: # 7203 relates to 'hay'\n return 'hA'\n if k1 in map2mw_special_Aug:\n return map2mw_special_Aug[k1]\n regexes = [\n u'<ab>aug.</ab> de {%(.*?)%}',\n u'<ab>aug.</ab> {%(.*?)%}',\n u'<ab>aug.</ab> du <ab>c.</ab> de {%(.*?)%}',\n\n ]\n line = entry.datalines[0] # first line of entry in bur.txt\n for regex in regexes:\n m = re.search(regex,line)\n if m:\n root = m.group(1) # root in \n root_slp1=roman_slp1_mw(root,'verb',d)\n if root_slp1 != None:\n return root_slp1\n\n return '?'",
"def _gtTSmap(self):\n if os.path.isfile(self.outtsmap):\n # Already exists\n return\n\n if self.csys == 'GAL':\n center_icrs = SkyCoord(ra=self.ra*u.degree, dec=self.dec*u.degree, frame='icrs')\n self.ra = center_icrs.galactic.l.deg\n self.dec = center_icrs.galactic.b.deg\n\n model = os.path.join(self.workpath, 'TSmapModel.xml') \n rfil = open(self.outmodel, 'r')\n wfil = open(model, 'w')\n isSrc = False\n isDif = False\n for line in rfil:\n if (isSrc) and ('<source name' in line):\n # Arrived to a new source, restart copying\n isSrc = False\n if (isDif) and ('<source name' in line) and ('PointSource' in line):\n isDif = False\n if 'TARGET' in line:\n isSrc = True\n if ('<source name=\"gll_iem_v06\"' in line) or ('<source name=\"iso_source_v06\"' in line): \n isDif = True\n \n if isSrc:\n # Do not copy the Target model to make it appear in the TS map\n pass\n else:\n if isDif:\n # Leave Diffuse model normalizations free\n wfil.write(line)\n else:\n # Make sur the gtlike output source model has all source parameters fixed\n wfil.write(line.replace('free=\"1\"', 'free=\"0\"'))\n rfil.close()\n wfil.close()\n\n # Launch the gttsmap tool \n if self.mode == 'binned':\n os.popen(\"gttsmap evfile={} scfile={} bexpmap={} expcube={} cmap={} srcmdl={}\\\n outfile={} evtype={} irfs=CALDB optimizer=NewMinuit statistic=BINNED ftol=1e-2\\\n coordsys={} proj=AIT nxpix={} nypix={} binsz={} xref={} yref={}\".format(self.outmktime,\n self.ft2, self.outbinexp, self.outltcube, self.outbincub, model, self.outtsmap, self.evtype,\n self.csys, self.imwid, self.imwid, self.binsz, self.ra, self.dec))\n elif self.mode == 'unbinned':\n os.popen(\"gttsmap evfile={} scfile={} expmap={} expcube={} srcmdl={}\\\n outfile={} evtype={} irfs=CALDB optimizer=NewMinuit statistic=UNBINNED ftol=1e-2\\\n coordsys={} proj=AIT nxpix={} nypix={} binsz={} xref={} yref={}\".format(self.outmktime,\n self.ft2, self.outexpmap, self.outltcube, model, self.outtsmap, self.evtype,\n self.csys, self.imwid, self.imwid, self.binsz, self.ra, self.dec))\n else:\n return\n\n if self.csys == 'GAL':\n self.ra = center_icrs.ra.deg\n self.dec = center_icrs.dec.deg\n return",
"def sky_to_sky(lon, lat, in_system, out_system): \n from astropy.coordinates import ICRS, Galactic\n systems = dict(galactic=Galactic, icrs=ICRS)\n lon = np.asarray(lon)\n lat = np.asarray(lat)\n\n coords = systems[in_system](lon, lat, units='deg')\n return coords.transform_to(systems[out_system]).degrees",
"def to_swc(self, contributors=\"\"):\n from . import __version__\n sx, sy, sz = np.diag(self.transform)[:3]\n\n swc_header = f\"\"\"# ORIGINAL_SOURCE CloudVolume {__version__}\n# CREATURE \n# REGION\n# FIELD/LAYER\n# TYPE\n# CONTRIBUTOR {contributors}\n# REFERENCE\n# RAW \n# EXTRAS \n# SOMA_AREA\n# SHINKAGE_CORRECTION \n# VERSION_NUMBER {__version__}\n# VERSION_DATE {datetime.datetime.utcnow().isoformat()}\n# SCALE {sx:.6f} {sy:.6f} {sz:.6f}\n\"\"\"\n\n def generate_swc(skel, offset):\n if skel.edges.size == 0:\n return \"\"\n\n index = defaultdict(set)\n visited = defaultdict(bool)\n for e1, e2 in skel.edges:\n index[e1].add(e2)\n index[e2].add(e1)\n\n stack = [ skel.edges[0,0] ]\n parents = [ -1 ]\n\n swc = \"\"\n\n while stack:\n node = stack.pop()\n parent = parents.pop()\n\n if visited[node]:\n continue\n\n swc += \"{n} {T} {x:0.6f} {y:0.6f} {z:0.6f} {R:0.6f} {P}\\n\".format(\n n=(node + 1 + offset),\n T=skel.vertex_types[node],\n x=skel.vertices[node][0],\n y=skel.vertices[node][1],\n z=skel.vertices[node][2],\n R=skel.radii[node],\n P=parent if parent == -1 else (parent + 1 + offset),\n )\n\n visited[node] = True\n \n for child in index[node]:\n stack.append(child)\n parents.append(node)\n\n return swc\n\n skels = self.components()\n\n swc = swc_header + \"\\n\"\n offset = 0\n for skel in skels:\n swc += generate_swc(skel, offset) + \"\\n\"\n offset += skel.vertices.shape[0]\n\n return swc",
"def transit_model(kic, times):\n table = planet_props.table\n params = kic_to_params(kic)\n m = batman.TransitModel(params, times) #initializes model\n flux = m.light_curve(params) #calculates light curve\n return flux",
"def as_ewkt(self) -> ir.StringValue:\n return ops.GeoAsEWKT(self).to_expr()",
"def update(measurements, time):\n measurements = np.array(measurements)\n data = measurements[:,0:3] # array of [lat, lon, elevation]\n targets = measurements[:,3] # array of corresponding temperature values\n \n # Setup the KRLS (Kernel Recursive Least Squares method)\n params = dict(adopt_thresh=0.01, dico_max_size=100)\n kernel = i2maps.algorithms.kernel.Gaussian(np.array([70000, 70000, 155.5])) \n model = i2maps.algorithms.krls.KRLS(kernel, params)\n \n # Train the model (sample by sample)\n for i in range(0, len(data)):\n model.update(np.array(data[i, :]), np.array(targets[i]))\n \n # Do the full grid prediction for this timestep now and save it\n dem = i2maps.spatial_array.load(path + 'dem')\n dem_points = dem.items()\n output = model.query(dem_points).tolist()\n predicted = np.array(output).reshape(dem.shape)\n predicted[dem == dem.nodata] = dem.nodata\n \n # Open the raster cube\n try:\n raster_cube = i2maps.raster_cube.load(filename)\n except Exception, e:\n print(e)\n print(\"Creating new RasterCube\")\n #shape = (height, width, num_timesteps)\n shape = (372, 282, 24*7)\n #envelope = [[min_y, max_y], [min_x, max_x], [min_t, max_t]]\n envelope = [[7436000.0, 6692000.0], [-1168000.0, -604000.0], [0, 0]] \n raster_cube = i2maps.raster_cube.RasterCube(filename=filename, shape=shape, envelope=envelope)\n \n # Save the prediction surface into the raster cube at time t\n raster_cube.insert(predicted, time)",
"def facilities_as_kml(facilities):\n return KML.Folder(*[facility_as_kml(facility) for facility in facilities])",
"def transform(infile, output, insrs, format_name):\n\n logging.info('Transforming %s from %s to %s' % (infile, insrs, output)) \n in_srs = osr.SpatialReference()\n in_srs.ImportFromEPSG(insrs)\n out_srs = osr.SpatialReference()\n out_srs.ImportFromEPSG(4324)\n coordTrans = osr.CoordinateTransformation(in_srs, out_srs)\n\n in_dsn = ogr.Open(infile)\n in_layer = in_dsn.GetLayer()\n in_feature_definition = in_layer.GetLayerDefn()\n\n out_driver = ogr.GetDriverByName(format_name)\n out_dsn = out_driver.CreateDataSource(output)\n out_layer = out_dsn.CreateLayer(in_layer.GetName(),\n geom_type=in_layer.GetGeomType())\n\n # add fields\n for i in range(0, in_feature_definition.GetFieldCount()):\n fieldDefn = in_feature_definition.GetFieldDefn(i)\n out_layer.CreateField(fieldDefn)\n\n # get the output layer's feature definition\n out_feature_definition = out_layer.GetLayerDefn()\n\n # loop through the input features\n inFeature = in_layer.GetNextFeature()\n while inFeature:\n # get the input geometry\n geom = inFeature.GetGeometryRef().Clone()\n # reproject the geometry\n geom.Transform(coordTrans)\n # create a new feature\n outFeature = ogr.Feature(out_feature_definition)\n # set the geometry and attribute\n outFeature.SetGeometry(geom)\n for i in range(0, out_feature_definition.GetFieldCount()):\n outFeature.SetField(out_feature_definition.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))\n # add the feature to the shapefile\n out_layer.CreateFeature(outFeature)\n # destroy the features and get the next input feature\n outFeature.Destroy()\n inFeature.Destroy()\n inFeature = in_layer.GetNextFeature()\n\n # close the shapefiles\n in_dsn.Destroy()\n out_dsn.Destroy()",
"def kmlWriter(output_data, output_dir, output_name):\n msg = 'Writing ' + output_name + ' KML output.'\n print '[+]', msg\n logging.info(msg)\n # Instantiate a Kml object and pass along the output filename\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if 'Latitude' in exif.keys() and 'Latitude Reference' in exif.keys() and 'Longitude Reference' in exif.keys() and 'Longitude' in exif.keys():\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'], description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output_dir, output_name))",
"def main():\n #short GPS Test\n filename = 'KML_short_test.kml'\n gps_filename = 'gps_short_test.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()\n\n #Repeat test\n filename = 'KML_repeat_test1.kml'\n gps_filename = 'gps_1.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()\n\n filename = 'KML_repeat_test2.kml'\n gps_filename = 'gps_1.txt'\n gpsfile = open(gps_filename, 'r')\n file = open(filename, 'w')\n addHeader(file)\n coordinate_lst = convert(gpsfile)\n cleaned = GPS_to_CostMap.clean_gps_data(coordinate_lst)\n write_coordinates(cleaned, file)\n addTrailer(file)\n file.close()",
"def main():\n # Constants\n groundstation_name = 'Wallops Antenna'\n groundstation_address = 'Radar Road, Temperanceville, VA 23442'\n satnum = 25544 # ISS = 25544\n saturl=\"http://www.celestrak.com/NORAD/elements/stations.txt\"\n gs_minimum_elevation_angle = 10.0\n\n # Alternate constants\n gs_alt_lat = 37.854886 # Only needed if address not found\n gs_alt_lon = -75.512936 # Ditto\n gs_alt_el_meters = 3.8 # Ditto\n gs_alt_tz_offset_seconds = -18000.0 # Ditto\n gs_tzname = 'US/Eastern'\n\n # Construct the ground station info\n try:\n # Try to use the address...\n gs = GroundStation.from_address(groundstation_address, \\\n groundstation_name, \\\n gs_minimum_elevation_angle)\n except:\n # Otherwise, use explicit location data...\n gs = GroundStation.from_location(gs_alt_lat, gs_alt_lon, \\\n gs_alt_el_meters, \\\n gs_tzname, \\\n groundstation_name, \\\n gs_minimum_elevation_angle)\n\n # Times we need\n now = datetime.now()\n gs_today = gs.get_tz().localize(datetime(now.year, now.month, now.day))\n gs_today_start = gs.get_tz().localize(datetime(now.year, now.month, now.day, \\\n 0, 0, 0)) \n gs_today_end = gs.get_tz().localize(datetime(now.year, now.month, now.day, \\\n 23, 59, 59))\n\n # Get the InviewCalculator and compute the inviews\n st = SatelliteTle(satnum, tle_url=saturl)\n ic = InviewCalculator(gs, st)\n inviews = ic.compute_inviews(gs_today_start, gs_today_end)\n\n # Print the results\n print_satellite_header(st)\n print_inview_header(gs.get_minimum_elevation_angle(), gs_today, gs)\n print_inviews(gs, inviews)\n print_azeltables(inviews, ic)",
"def ras2ijk(self,A):\n #productive #math #coordinate-space-conversion\n profprint()\n m=vtk.vtkMatrix4x4()\n volumeNode = slicer.app.layoutManager().sliceWidget(\"Red\").sliceLogic().GetBackgroundLayer().GetVolumeNode()\n volumeNode.GetIJKToRASMatrix(m)\n m.Invert()\n imageData = volumeNode.GetImageData()\n ijk=[0,0,0]\n k = vtk.vtkMatrix4x4()\n o = vtk.vtkMatrix4x4()\n k.SetElement(0,3,A[0])\n k.SetElement(1,3,A[1])\n k.SetElement(2,3,A[2])\n k.Multiply4x4(m,k,o)\n ijk[0] = o.GetElement(0,3)\n ijk[1] = o.GetElement(1,3)\n ijk[2] = o.GetElement(2,3)\n return ijk",
"def ISOMAPEmbbeding(TurosR=10,Torusr=4,Classes=[3,5,7],nei=[5,10,20], DataSet = {'Turos', 'Digits'}):\n\n S, dig = CreateDS_Torus_Digits(TurosR=TurosR,Torusr=Torusr,Classes=[3,5,7])\n ### ------ Isomap ------###\n nei = nei\n\n if 'Turos' in DataSet:\n # Ploting Torus Isomapping\n fig = plt.figure(figsize=(30, 10))\n for i, j in enumerate(nei):\n Torus_isomap = Isomap(S, 2, j)\n neighbors = j\n method = 'Torus ISOMAP'\n ax = fig.add_subplot(1, len(nei), i + 1)\n scatter = ax.scatter(Torus_isomap[:, 0], Torus_isomap[:, 1], c=S[:, 0:1], cmap=plt.cm.Spectral)\n # legend = ax.legend(*scatter.legend_elements(), loc=\"lower left\", title=\"Classes\")\n # ax.add_artist(legend)\n # ax.legend()\n ax.set_title('{} with {} Neighbours'.format(method, neighbors))\n # making_plot(Torus_isomap, pallete=S[:, 0:1], neighbors=j, method='Torus ISOMAP') #An option to plot single graphs\n plt.savefig('Torus ISOMAP embbeding for {} neighbour'.format(nei))\n\n if 'Digits' in DataSet:\n # Plotting Digits Isomapping\n for Argclass, Specificcalss in enumerate(dig):\n fig = plt.figure(figsize=(30, 10))\n for i, j in enumerate(nei):\n neighbors = j\n Digit_isomap = Isomap(Specificcalss[0], 2, j)\n method = 'Digit ISOMAP'\n ax = fig.add_subplot(1, len(nei), i + 1)\n scatter = ax.scatter(Digit_isomap[:, 0], Digit_isomap[:, 1], c=Specificcalss[1], cmap=plt.cm.Spectral)\n legend = ax.legend(*scatter.legend_elements(), loc=\"lower left\", title=\"Classes\")\n ax.add_artist(legend)\n ax.legend()\n ax.set_title('{} with {} Neighbours'.format(method, neighbors))\n # making_plot(Digit_isomap, Specificcalss[1], neighbors=j, method='Digit ISOMAP') #An option to plot single graphs\n plt.savefig('Digits up to {} - ISOMAP embbeding for {} neighbour'.format(Classes[Argclass], nei))",
"def read_szx_fmv_13(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n mphr = eps_file.mphr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = raw_data[\"LONGITUDE\"].size\n\n data = {}\n metadata = {}\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n metadata[\"spacecraft_id\"] = np.int8(mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(mphr[\"ORBIT_START\"])\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n\n for f in fields:\n metadata[f] = np.int16(mphr[f.upper()])\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"abs_line_number\"\n ]\n\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan), (\"latitude\", long_nan),\n (\"swath indicator\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = nan_val\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"num_val_trip\", ulong_nan), (\"f_kp\", byte_nan),\n (\"f_usable\", byte_nan), (\"land_frac\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n # modify longitudes from (0, 360) to (-180,180)\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1),\n n_lines).astype(np.uint8)\n\n data[\"line_num\"] = idx_nodes.astype(np.uint16)\n\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n data[\"swath_indicator\"] = data.pop(\"swath indicator\")\n\n data[\"f_land\"] = data.pop(\"land_frac\")\n\n return data, metadata",
"def readKML(filename):\n\n kml_file = path.join(filename)\n\n #### se leen los elementos del KML\n with open(kml_file) as f:\n folder = parser.parse(f).getroot().Document.Folder\n\n #### se separan los elementos, nombres de los puntos y las coordenadas\n plnm=[]\n cordi=[]\n for pm in folder.Placemark:\n plnm1 = pm.name\n plcs1 = pm.Point.coordinates\n plnm.append(plnm1.text)\n cordi.append(plcs1.text)\n # print(cordi)\n # print(plnm) \n\n #### se genera el objeto pandas\n db=pd.DataFrame()\n db['point_name']=plnm\n db['cordinates']=cordi\n\n db['Longitude'], db['Latitude'], db['value'] = zip(*db['cordinates'].apply(lambda x: x.split(',', 2)))\n db[\"Longitude\"] = pd.to_numeric(db[\"Longitude\"])\n db[\"Latitude\"] = pd.to_numeric(db[\"Latitude\"])\n del db['cordinates']\n del db['value']\n\n db['Coordinates'] = list(zip(db.Longitude, db.Latitude))\n db['Coordinates'] = db['Coordinates'].apply(Point)\n\n # print(db)\n\n return db",
"def json_to_lkg(filename):\n\n nx_graph = json_to_nx(filename)\n lkg = nx_to_lkg(nx_graph)\n return(lkg)",
"def education(osm_path): \n return (retrieve(osm_path,'multipolygons',['amenity'],**{'amenity':[\"='college' or \",\"='kindergarten' or \",\"='library' or \",\"='school' or \",\"='university'\"]})).rename(columns={'amenity': 'asset'})",
"def full_sky_car_template(ncomp, res):\n\n if ncomp == 3:\n pre = (3,)\n else:\n pre = ()\n\n res = res * np.pi / (180 * 60)\n temp = so_map()\n shape, wcs = enmap.fullsky_geometry(res=res, dims=pre)\n temp.data = enmap.zeros(shape, wcs=wcs, dtype=None)\n temp.pixel = \"CAR\"\n temp.nside = None\n temp.ncomp = ncomp\n temp.geometry = temp.data.geometry[1:]\n temp.coordinate = \"equ\"\n return temp",
"def generate_openlayers( self ):\n\n args = {}\n args['title'] = self.options.title\n args['googlemapskey'] = self.options.googlekey\n args['yahooappid'] = self.options.yahookey\n args['south'], args['west'], args['north'], args['east'] = self.swne\n args['minzoom'] = self.tminz\n args['maxzoom'] = self.tmaxz\n args['tilesize'] = self.tilesize\n args['tileformat'] = format_extension[self.image_output.format]\n if self.image_output.format == \"PNG\":\n args['has_alpha'] = 'true'\n else:\n args['has_alpha'] = 'false'\n args['publishurl'] = \"\" if self.options.url is None else self.options.url\n args['copyright'] = self.options.copyright\n if self.options.profile in ('raster', 'gearth'):\n args['rasterzoomlevels'] = self.tmaxz+1\n args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1]\n\n s = \"\"\"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n <html xmlns=\"http://www.w3.org/1999/xhtml>\"\n <head>\n <title>%(title)s</title>\n <meta http-equiv='imagetoolbar' content='no'/>\n <style type=\"text/css\"> v\\:* {behavior:url(#default#VML);}\n html, body { overflow: hidden; padding: 0; height: 100%%; width: 100%%; font-family: 'Lucida Grande',Geneva,Arial,Verdana,sans-serif; }\n body { margin: 10px; background: #fff; }\n h1 { margin: 0; padding: 6px; border:0; font-size: 20pt; }\n #header { height: 43px; padding: 0; background-color: #eee; border: 1px solid #888; }\n #subheader { height: 12px; text-align: right; font-size: 10px; color: #555;}\n #map { height: 95%%; border: 1px solid #888; }\n </style>\"\"\" % args\n\n if self.options.profile == 'mercator':\n s += \"\"\"\n <script src='http://dev.virtualearth.net/mapcontrol/mapcontrol.ashx?v=6.1'></script>\n <script src='http://maps.google.com/maps?file=api&v=2&key=%(googlemapskey)s' type='text/javascript'></script>\n <script src=\"http://api.maps.yahoo.com/ajaxymap?v=3.0&appid=%(yahooappid)s\"></script>\"\"\" % args\n\n s += \"\"\"\n <script src=\"http://www.openlayers.org/api/2.7/OpenLayers.js\" type=\"text/javascript\"></script>\n <script type=\"text/javascript\">\n var map;\n var mapBounds = new OpenLayers.Bounds( %(west)s, %(south)s, %(east)s, %(north)s);\n var mapMinZoom = %(minzoom)s;\n var mapMaxZoom = %(maxzoom)s;\n\n // avoid pink tiles\n OpenLayers.IMAGE_RELOAD_ATTEMPTS = 3;\n OpenLayers.Util.onImageLoadErrorColor = \"transparent\";\n\n function init(){\"\"\" % args\n\n if self.options.profile == 'mercator':\n s += \"\"\"\n var options = {\n controls: [],\n projection: new OpenLayers.Projection(\"EPSG:900913\"),\n displayProjection: new OpenLayers.Projection(\"EPSG:4326\"),\n units: \"m\",\n maxResolution: 156543.0339,\n maxExtent: new OpenLayers.Bounds(-20037508, -20037508, 20037508, 20037508.34)\n };\n map = new OpenLayers.Map('map', options);\n\n // create Google Mercator layers\n var gmap = new OpenLayers.Layer.Google(\"Google Streets\",\n { sphericalMercator: true, numZoomLevels: 20} );\n var gsat = new OpenLayers.Layer.Google(\"Google Satellite\",\n {type: G_SATELLITE_MAP, sphericalMercator: true, numZoomLevels: 20} );\n var ghyb = new OpenLayers.Layer.Google(\"Google Hybrid\",\n {type: G_HYBRID_MAP, sphericalMercator: true, numZoomLevels: 20});\n var gter = new OpenLayers.Layer.Google(\"Google Terrain\",\n {type: G_PHYSICAL_MAP, sphericalMercator: true, numZoomLevels: 20 });\n\n // create Virtual Earth layers\n OpenLayers.Layer.VirtualEarth.prototype.MAX_ZOOM_LEVEL=19;\n 
OpenLayers.Layer.VirtualEarth.prototype.RESOLUTIONS=OpenLayers.Layer.Google.prototype.RESOLUTIONS\n var veroad = new OpenLayers.Layer.VirtualEarth(\"Virtual Earth Roads\",\n {'type': VEMapStyle.Road, 'sphericalMercator': true, numZoomLevels: 20});\n var veaer = new OpenLayers.Layer.VirtualEarth(\"Virtual Earth Aerial\",\n {'type': VEMapStyle.Aerial, 'sphericalMercator': true, numZoomLevels: 20 });\n var vehyb = new OpenLayers.Layer.VirtualEarth(\"Virtual Earth Hybrid\",\n {'type': VEMapStyle.Hybrid, 'sphericalMercator': true});\n\n // create Yahoo layer\n var yahoo = new OpenLayers.Layer.Yahoo(\"Yahoo Street\",\n {'sphericalMercator': true});\n var yahoosat = new OpenLayers.Layer.Yahoo(\"Yahoo Satellite\",\n {'type': YAHOO_MAP_SAT, 'sphericalMercator': true});\n var yahoohyb = new OpenLayers.Layer.Yahoo(\"Yahoo Hybrid\",\n {'type': YAHOO_MAP_HYB, 'sphericalMercator': true});\n\n // create OSM/OAM layer\n var osm = new OpenLayers.Layer.TMS( \"OpenStreetMap\",\n \"http://tile.openstreetmap.org/\",\n { type: 'png', getURL: osm_getTileURL, displayOutsideMaxExtent: true,\n attribution: '<a href=\"http://www.openstreetmap.org/\">OpenStreetMap</a>'} );\n var oam = new OpenLayers.Layer.TMS( \"OpenAerialMap\",\n \"http://tile.openaerialmap.org/tiles/1.0.0/openaerialmap-900913/\",\n { type: 'png', getURL: osm_getTileURL } );\n\n // create TMS Overlay layer\n var tmsoverlay = new OpenLayers.Layer.TMS( \"TMS Overlay\", \"\",\n { // url: '', serviceVersion: '.', layername: '.',\n type: '%(tileformat)s', getURL: overlay_getTileURL, alpha: %(has_alpha)s,\n isBaseLayer: false\n });\n if (OpenLayers.Util.alphaHack() == false) { tmsoverlay.setOpacity(0.7); }\n\n map.addLayers([gmap, gsat, ghyb, gter, veroad, veaer, vehyb,\n yahoo, yahoosat, yahoohyb, osm, oam,\n tmsoverlay]);\n\n var switcherControl = new OpenLayers.Control.LayerSwitcher();\n map.addControl(switcherControl);\n switcherControl.maximizeControl();\n\n map.zoomToExtent( mapBounds.transform(map.displayProjection, map.projection ) );\n \"\"\" % args\n\n elif self.options.profile == 'geodetic':\n s += \"\"\"\n var options = {\n controls: [],\n projection: new OpenLayers.Projection(\"EPSG:4326\"),\n maxResolution: 0.703125,\n maxExtent: new OpenLayers.Bounds(-180, -90, 180, 90)\n };\n map = new OpenLayers.Map('map', options);\n\n layer = new OpenLayers.Layer.WMS( \"Blue Marble\",\n \"http://labs.metacarta.com/wms-c/Basic.py?\", {layers: 'satellite' } );\n map.addLayer(layer);\n wms = new OpenLayers.Layer.WMS( \"VMap0\",\n \"http://labs.metacarta.com/wms-c/Basic.py?\", {layers: 'basic', format: 'image/png' } );\n map.addLayer(wms);\n\n var tmsoverlay = new OpenLayers.Layer.TMS( \"TMS Overlay\", \"\",\n {\n serviceVersion: '.', layername: '.', alpha: %(has_alpha)s,\n type: '%(tileformat)s', getURL: overlay_getTileURL,\n isBaseLayer: false\n });\n map.addLayer(tmsoverlay);\n if (OpenLayers.Util.alphaHack() == false) { tmsoverlay.setOpacity(0.7); }\n\n var switcherControl = new OpenLayers.Control.LayerSwitcher();\n map.addControl(switcherControl);\n switcherControl.maximizeControl();\n\n map.zoomToExtent( mapBounds );\n \"\"\" % args\n\n elif self.options.profile in ('raster', 'gearth'):\n s += \"\"\"\n var options = {\n controls: [],\n maxExtent: new OpenLayers.Bounds( %(west)s, %(south)s, %(east)s, %(north)s ),\n maxResolution: %(rastermaxresolution)f,\n numZoomLevels: %(rasterzoomlevels)d\n };\n map = new OpenLayers.Map('map', options);\n\n var layer = new OpenLayers.Layer.TMS( \"TMS Layer\",\"\",\n { url: '', serviceVersion: '.', layername: '.', 
alpha: %(has_alpha)s,\n type: '%(tileformat)s', getURL: overlay_getTileURL\n });\n map.addLayer(layer);\n map.zoomToExtent( mapBounds );\n \"\"\" % args\n\n\n s += \"\"\"\n map.addControl(new OpenLayers.Control.PanZoomBar());\n map.addControl(new OpenLayers.Control.MousePosition());\n map.addControl(new OpenLayers.Control.MouseDefaults());\n map.addControl(new OpenLayers.Control.KeyboardDefaults());\n }\n \"\"\" % args\n\n if self.options.profile == 'mercator':\n s += \"\"\"\n function osm_getTileURL(bounds) {\n var res = this.map.getResolution();\n var x = Math.round((bounds.left - this.maxExtent.left) / (res * this.tileSize.w));\n var y = Math.round((this.maxExtent.top - bounds.top) / (res * this.tileSize.h));\n var z = this.map.getZoom();\n var limit = Math.pow(2, z);\n\n if (y < 0 || y >= limit) {\n return \"https://github.com/mj10777/mapmbtiles/img/none.png\";\n } else {\n x = ((x %% limit) + limit) %% limit;\n return this.url + z + \"/\" + x + \"/\" + y + \".\" + this.type;\n }\n }\n\n function overlay_getTileURL(bounds) {\n var res = this.map.getResolution();\n var x = Math.round((bounds.left - this.maxExtent.left) / (res * this.tileSize.w));\n var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));\n var z = this.map.getZoom();\n if (this.map.baseLayer.name == 'Virtual Earth Roads' || this.map.baseLayer.name == 'Virtual Earth Aerial' || this.map.baseLayer.name == 'Virtual Earth Hybrid') {\n z = z + 1;\n }\n if (mapBounds.intersectsBounds( bounds ) && z >= mapMinZoom && z <= mapMaxZoom ) {\n //console.log( this.url + z + \"/\" + x + \"/\" + y + \".\" + this.type);\n return this.url + z + \"/\" + x + \"/\" + y + \".\" + this.type;\n } else {\n return \"https://github.com/mj10777/mapmbtiles/img/none.png\";\n }\n }\n \"\"\" % args\n\n elif self.options.profile == 'geodetic':\n s += \"\"\"\n function overlay_getTileURL(bounds) {\n bounds = this.adjustBounds(bounds);\n var res = this.map.getResolution();\n var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));\n var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));\n var z = this.map.getZoom();\n var path = this.serviceVersion + \"/\" + this.layername + \"/\" + z + \"/\" + x + \"/\" + y + \".\" + this.type;\n var url = this.url;\n if (mapBounds.intersectsBounds( bounds ) && z >= mapMinZoom && z <= mapMaxZoom) {\n // console.log( this.url + z + \"/\" + x + \"/\" + y + \".\" + this.type);\n return this.url + z + \"/\" + x + \"/\" + y + \".\" + this.type;\n } else {\n return \"https://github.com/mj10777/mapmbtiles/img/none.png\";\n }\n }\n \"\"\" % args\n\n elif self.options.profile in ('raster','gearth'):\n s += \"\"\"\n function overlay_getTileURL(bounds) {\n var res = this.map.getResolution();\n var x = Math.round((bounds.left - this.maxExtent.left) / (res * this.tileSize.w));\n var y = Math.round((bounds.bottom - this.maxExtent.bottom) / (res * this.tileSize.h));\n var z = this.map.getZoom();\n if (x >= 0 && y >= 0) {\n return this.url + z + \"/\" + x + \"/\" + y + \".\" + this.type;\n } else {\n return \"https://github.com/mj10777/mapmbtiles/img/none.png\";\n }\n }\n \"\"\" % args\n\n s += \"\"\"\n function getWindowHeight() {\n if (self.innerHeight) return self.innerHeight;\n if (document.documentElement && document.documentElement.clientHeight)\n return document.documentElement.clientHeight;\n if (document.body) return document.body.clientHeight;\n return 0;\n }\n\n function getWindowWidth() {\n if (self.innerWidth) return self.innerWidth;\n if 
(document.documentElement && document.documentElement.clientWidth)\n return document.documentElement.clientWidth;\n if (document.body) return document.body.clientWidth;\n return 0;\n }\n\n function resize() {\n var map = document.getElementById(\"map\");\n var header = document.getElementById(\"header\");\n var subheader = document.getElementById(\"subheader\");\n map.style.height = (getWindowHeight()-80) + \"px\";\n map.style.width = (getWindowWidth()-20) + \"px\";\n header.style.width = (getWindowWidth()-20) + \"px\";\n subheader.style.width = (getWindowWidth()-20) + \"px\";\n if (map.updateSize) { map.updateSize(); };\n }\n\n onresize=function(){ resize(); };\n\n </script>\n </head>\n <body onload=\"init()\">\n <div id=\"header\"><h1>%(title)s</h1></div>\n <div id=\"subheader\">Generated by <a href=\"https://github.com/mj10777/mapmbtiles\">MapMbTiles</a>/<a href=\"http://www.klokan.cz/projects/gdal2mbtiles/\">GDAL2MbTiles</a>, Copyright © 2008 <a href=\"http://www.klokan.cz/\">Klokan Petr Pridal</a>, <a href=\"http://www.gdal.org/\">GDAL</a> & <a href=\"http://www.osgeo.org/\">OSGeo</a> <a href=\"http://code.google.com/soc/\">GSoC</a>\n <!-- PLEASE, LET THIS NOTE ABOUT AUTHOR AND PROJECT SOMEWHERE ON YOUR WEBSITE, OR AT LEAST IN THE COMMENT IN HTML. THANK YOU -->\n </div>\n <div id=\"map\"></div>\n <script type=\"text/javascript\" >resize()</script>\n </body>\n </html>\"\"\" % args\n\n return s",
"def visualize(self):\n self.octree.updateInnerOccupancy()\n print(\"Start Octomap Visualization\")\n\n # define parameters\n data = imgviz.data.arc2017()\n camera_info = data['camera_info']\n K = np.array(camera_info['K']).reshape(3, 3)\n width=camera_info['width']\n height=camera_info['height']\n\n # get free and occupied grid\n occupied, _ = self.octree.extractPointCloud()\n #frontier = self.gen_frontier()\n \n print(\"load point cloud\")\n window = pyglet.window.Window(\n width=int(1280), height=int(960)\n )\n\n @window.event\n def on_key_press(symbol, modifiers):\n if modifiers == 0:\n if symbol == pyglet.window.key.Q:\n window.on_close()\n\n gui = glooey.Gui(window)\n hbox = glooey.HBox()\n hbox.set_padding(5)\n\n camera = trimesh.scene.Camera(\n resolution=(width, height), focal=(K[0, 0], K[1, 1])\n )\n\n # initial camera pose\n camera_transform = np.array(\n [\n [1, 0, 0, 0],\n [0, -1, 0, 0],\n [0, 0, -1, -5],\n [0.0, 0.0, 0.0, 1.0],\n ],\n )\n\n \n\n occupied_geom = trimesh.voxel.ops.multibox(\n occupied, pitch=self.resolution, colors=[0.0, 0.0, 0.0, 0.5]\n )\n\n # frontier_geom = trimesh.voxel.ops.multibox(\n # frontier, pitch=self.resolution, colors=[1.0, 0, 0, 0.5]\n # )\n scene = trimesh.Scene(camera=camera, geometry=[occupied_geom])#, frontier_geom])\n scene.camera_transform = camera_transform\n hbox.add(self.labeled_scene_widget(scene, label='octomap'))\n\n\n gui.add(hbox)\n pyglet.app.run()",
"def neo4j_to_lkg():\n node_types = [\"judge\", \"keyword\", \"case\", \"catch\", \"act\", \"year\"]\n from backend.graph_formation.base.legal_knowledge_graph import LegalKnowledgeGraph\n\n lkg = LegalKnowledgeGraph()\n db = GraphDatabase(ENV[\"DB_URL\"], username=ENV[\"DB_USERNAME\"], password=ENV[\"DB_PASSWORD\"])\n # Authentication for NEO4J Browser\n\n for node_type in node_types:\n q = \"MATCH (c:{}) return c\".format(node_type) #Quering for all nodes in the graph\n results = db.query(q)\n for record in results:\n props={}\n node = record[0]\n if node:\n label = node[\"metadata\"][\"labels\"]\n node_id = node[\"data\"][\"id\"]\n node[\"data\"].pop(\"id\",None)\n props = node[\"data\"]\n props[\"type\"] = label\n lkg.add_node(id, **props)\n for node_type_1 in node_types:\n for node_type_2 in node_types:\n q = \"MATCH (c:{})-[r]->(m:{}) return c,m\".format(node_type_1, node_type_2) # Quering for all Relationships in the graph\n results = db.query(q)\n for record in results:\n node1 , node2 = record\n lkg.add_edge(node1[\"data\"][\"id\"], node2[\"data\"][\"id\"])\n return(lkg)",
"def utt_to_scene(file_name):\n with open(file_name, 'r') as file:\n data = file.readlines()\n data = [line.strip().split() for line in data if line.strip() != '']\n data = [[line[0], \" \".join(line[1:])] for line in data]\n preproc_data = [[line[0], list(map(lambda x: x[:x.find(\":\")], line[1].split(',')))[:-1]] for line in data]\n scene_mapping = {line[0]: line[1] for line in preproc_data}\n return scene_mapping"
] | [
"0.70086014",
"0.6861497",
"0.61510426",
"0.5941489",
"0.59399176",
"0.59311515",
"0.58990943",
"0.5860803",
"0.5813129",
"0.5499347",
"0.54266125",
"0.5407943",
"0.5396494",
"0.5380504",
"0.53745914",
"0.5275087",
"0.52564675",
"0.5236269",
"0.51970184",
"0.516719",
"0.5145743",
"0.5102854",
"0.5098353",
"0.50761837",
"0.5075811",
"0.50413555",
"0.49580017",
"0.49573904",
"0.49455327",
"0.4936676",
"0.49348822",
"0.4934762",
"0.49329126",
"0.49233508",
"0.4914022",
"0.49117008",
"0.49084353",
"0.4893643",
"0.48855856",
"0.48805365",
"0.48464897",
"0.48398152",
"0.48095083",
"0.4801413",
"0.4795352",
"0.47935948",
"0.47669494",
"0.47642025",
"0.47582933",
"0.47573626",
"0.47571388",
"0.4750875",
"0.4730881",
"0.47271147",
"0.47271007",
"0.47251296",
"0.47106138",
"0.47012052",
"0.46922228",
"0.46878132",
"0.46840152",
"0.46838605",
"0.4683116",
"0.4682862",
"0.46743482",
"0.4672674",
"0.4672067",
"0.46701106",
"0.46661428",
"0.4652358",
"0.46465486",
"0.46455997",
"0.46310544",
"0.4627059",
"0.46255422",
"0.46185854",
"0.46170843",
"0.46157107",
"0.4614282",
"0.4612075",
"0.46112812",
"0.46037397",
"0.46031246",
"0.4601938",
"0.46009028",
"0.45986903",
"0.459481",
"0.4589053",
"0.45887834",
"0.45869175",
"0.45854065",
"0.45767978",
"0.45741183",
"0.45731127",
"0.45701683",
"0.45697016",
"0.45584497",
"0.4557037",
"0.45505527",
"0.4539055"
] | 0.6388248 | 2 |
generates json format for web page visualization | def make_json_airspace_format(self):
        # The previous function, make_open_airspace_format, already stored coordinates_kml, name and type.
        # This data is collected in a dictionary, which is then stored as json.
        # initialize the list of coordinate pairs
        coordinates_as_list_of_floats = []
        # run through coordinates ("lon,lat" pairs separated by spaces; the trailing empty split entry is dropped)
for coo_pt in self.coordinates_kml.split(' ')[:-1]:
lat_long = coo_pt.split(',')
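            # KML stores each point as "lon,lat"; swap to [lat, lon] for the web page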
coordinates_as_list_of_floats.append([float(lat_long[1]), float(lat_long[0])])
# make json dict
        # strip the 'TS_' prefix from the name unless this is a thermal airspace (type 'A' or 'B')
if self.name.startswith('TS_') and not (self.as_type == 'A' or self.as_type == 'B'):
name_for_json = self.name[3:]
else:
name_for_json = self.name
# rename airspace type for json:
if self.as_type == 'A':
self.as_type = 'Good_thermals'
if self.as_type == 'B':
self.as_type = 'Bad_thermals'
self.json_dict = {"AL": "FL98", "AH": "FL99", "AC": self.as_type, "AN": name_for_json, "data": coordinates_as_list_of_floats} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def json(self) -> PageJson:\n\n json: PageJson = {}\n json[\"id\"] = self.id\n json[\"cells\"] = [cell.json() for cell in self.cells]\n json[\"data\"] = self.data\n return json",
"def json_view(self, recursive=False):\n\n context = self.context.aq_inner\n data = self.export(context, recursive=recursive)\n pretty = json.dumps(data, sort_keys=True, indent=4)\n self.request.response.setHeader(\"Content-type\", \"application/json\")\n return pretty",
"def data_json(request):\n json_data = []\n for resource in Resource.objects.all():\n record = {} \n record['title'] = resource.name\n record['description'] = resource.description\n record['keyword'] = resource.csw_keywords.split(',')\n record['modified'] = resource.last_updated\n record['publisher'] = resource.organization\n record['contactPoint'] = resource.metadata_contact\n record['mbox'] = resource.contact_email\n record['identifier'] = resource.csw_identifier\n if resource.is_published:\n record['accessLevel'] = 'public'\n else:\n record['accessLevel'] = 'non-public'\n\n json_data.append(record)\n\n return HttpResponse(json.dumps(json_data), 'application/json')",
"def json(self):\n robot_dict = self.robot_dict()\n target_dict = self.target_dict()\n json_str = '{'\n json_str = json_str + '\"robot_obj\" : ' + json.dumps(robot_dict) + \",\\n\"\n json_str = json_str + '\"target_obj\" : ' + json.dumps(target_dict) + \"\\n\"\n json_str = json_str + '}'\n return(json_str)",
"def json(self):\r\n return {\"id\": self.id, \"code\": self.code, \"description\": self.description, \"xCoor\": self.x_coor, \"yCoor\": self.y_coor, \"latitude\": self.latitude,\r\n \"longitude\": self.longitude, \"waterschapId\": self.waterschap_id, \"watertypeId\": self.watertype_id, \"watertypeKrwId\": self.watertype_krw_id}",
"def json(data):\n if isinstance(data, dict):\n data = ujson.encode(data)\n uid = str(uuid.uuid4())\n display(HTML('<div id=\"{0}\" style=\"height: 600px; width:100%;\"></div>'.format(uid)))\n display(Javascript(\"\"\"\n require([\"https://rawgit.com/caldwell/renderjson/master/renderjson.js\"], function() {\n document.getElementById('%s').appendChild(renderjson(%s))\n });\n \"\"\" % (uid, data)))",
"def json_data(self):\n self.check_proof()\n return {\n \"vars\": [{'name': v.name, 'T': str(v.T)} for v in self.vars],\n \"proof\": sum([printer.export_proof_item(self.thy, item, unicode=True, highlight=True)\n for item in self.prf.items], []),\n \"report\": self.rpt.json_data(),\n \"method_sig\": self.get_method_sig()\n }",
"def json_friendly(self):",
"def json(self, update=False):\n return json.dumps(self.export(update=update), indent=4)",
"def visualize_json(request, handle_id):\n # Get the node\n nh = NodeHandle.objects.get(pk=handle_id)\n root_node = nc.get_node_model(nc.graphdb.manager, nh.handle_id)\n if root_node:\n # Create the data JSON structure needed\n graph_dict = arborgraph.create_generic_graph(root_node)\n jsonstr = arborgraph.get_json(graph_dict)\n else:\n jsonstr = '{}'\n return HttpResponse(jsonstr, content_type='application/json')",
"def format(self, *args):\n\t\tweb.header('Content-Type', 'application/json; charset=utf-8')\n\t\treturn json.dumps(self.content)",
"def render_dictionary(self): \n asset_json = {\n 'name': self.name,\n 'product_name': self.product_name,\n 'product_vendor': self.product_vendor,\n 'configuration': self.configuration,\n 'description': self.description,\n 'primary_users': self.primary_users,\n 'primary_voting': self.primary_voting,\n 'secondary_users': self.secondary_users,\n 'secondary_voting': self.secondary_voting,\n 'tags': self.tags,\n 'type': self.asset_type,\n 'action_whitelist': self.action_whitelist\n }\n\n if self.ingest_container_label:\n asset_json['ingest'] = {\n 'container_label': self.ingest_container_label,\n 'interval_mins': self.ingest_interval_mins,\n 'poll': self.ingest_poll,\n 'start_time_epoch_utc': self.ingest_start_time\n }\n\n return asset_json",
"def _json(self, data):\n if len(data) == 0:\n return \"\"\n if self.meta:\n data['meta_history'] = [{'prog': __prog__,\n 'release': __release__,\n 'author': __author__,\n 'date': __now__},]\n return json.dumps(data) + \"\\n\"",
"def detailed_json(self, absolutize_url):\n template = {}\n template.update({\n \"id\": self.image_id,\n \"links\": self.links_json(absolutize_url),\n \"name\": self.name,\n \"minRam\": self.minRam,\n \"minDisk\": self.minDisk,\n \"OS-EXT-IMG-SIZE:size\": self.image_size,\n \"com.rackspace__1__ui_default_show\": self.is_default,\n \"created\": \"1972-01-01_15-59-11\",\n \"updated\": \"1972-01-01_15-59-11\",\n \"status\": \"ACTIVE\",\n \"progress\": 100,\n \"metadata\": self.metadata_json()\n })\n return template",
"def detailed_json(self, absolutize_url):\n template = {}\n template.update({\n \"id\": self.image_id,\n \"links\": self.links_json(absolutize_url),\n \"name\": self.name,\n \"minRam\": self.minRam,\n \"minDisk\": self.minDisk,\n \"OS-EXT-IMG-SIZE:size\": self.image_size,\n \"com.rackspace__1__ui_default_show\": self.is_default,\n \"created\": \"1972-01-01_15-59-11\",\n \"updated\": \"1972-01-01_15-59-11\",\n \"progress\": 100,\n \"status\": \"ACTIVE\",\n \"metadata\": self.metadata_json()\n })\n return template",
"def ajax_get_statistics():\r\n return jsonify(generate_statistics())",
"def generate(self, sorted=False):\n json_result = json.dumps(self.generate_dict(), sort_keys=sorted)\n return json_result",
"def get_json(self):\n return {\n \"power\": self.get_power(), \n \"timestamp\": self.get_timestamp(), \n \"shortage\": self.get_shortage()\n }",
"def get_chart_one(request):\r\n json_str = []\r\n \r\n usuarios = Usuario.objects.all()\r\n for usuario in usuarios:\r\n peticiones = Peticion.objects.filter(usuario=usuario)\r\n json_str.append({ \r\n 'name': u'%s %s' % (usuario.persona.nombre,\r\n usuario.persona.apellidos),\r\n 'data': len(peticiones)\r\n }) \r\n json_obj = json.dumps(json_str, sort_keys=True, indent=4)\r\n response = HttpResponse(json_obj, mimetype=\"application/json\") \r\n return response",
"def to_json(self):\n\n d = {\n \"title\": self.title,\n \"abstract\": self.abstract,\n \"intellectual_merit\": self.intellectual_merit,\n \"broader_impact\": self.broader_impact,\n \"use_of_fg\": self.use_of_fg,\n \"scale_of_use\": self.scale_of_use,\n \"categories\": self.categories,\n \"keywords\": self.keywords,\n \"primary_discipline\": self.primary_discipline,\n \"orientation\": self.orientation,\n \"contact\": self.contact,\n \"url\": self.url,\n \"active\": self.active,\n \"status\": self.status,\n \"lead\": self.lead,\n \"members\": self.members,\n \"resources_services\": self.resources_services,\n \"resources_software\": self.resources_software,\n \"resources_clusters\": self.resources_clusters,\n \"resources_provision\": self.resources_provision\n }\n return d",
"def as_json(self):",
"def create_json(self):\n data = {\"image_id\": self.ids, \"img_path\": self.img_paths, \"bg\": self.bgs}\n if hasattr(self, \"bbox\"):\n data[\"bbox\"] = self.bbox\n if hasattr(self, \"masks\"):\n data[\"masks\"] = self.masks\n with open(f\"{self.save_path}{self.name}/json/images_info.json\", \"w\") as f:\n json.dump(data, f)",
"def get_json(self):\n data = {}\n data['ip'] = self.ip\n\n try:\n data['country'] = self.processedvtdata[\"country\"]\n except KeyError:\n data['country'] = 'None'\n try:\n data['as'] = self.processedvtdata[\"as_owner\"]\n except KeyError:\n data['as'] = 'None'\n try:\n data['rdns'] = self.processedvtdata[\"self.reversedns\"]\n except KeyError:\n data['rdns'] = 'None'\n try:\n data['label'] = self.expertlabel\n except AttributeError:\n data['label'] = ''\n\n # geodata\n #{\"status\":\"success\",\"country\":\"Yemen\",\"countryCode\":\"YE\",\"region\":\"SA\",\"regionName\":\"Amanat Alasimah\",\"city\":\"Sanaa\",\"zip\":\"\",\"lat\":15.3522,\"lon\":44.2095,\"timezone\":\"Asia/Aden\",\"isp\":\"Public Telecommunication Corporation\",\"org\":\"YemenNet\",\"as\":\"AS30873 Public Telecommunication Corporation\",\"query\":\"134.35.218.63\"}\n if self.geodata:\n data['geodata'] = self.geodata\n \n # vt resolutions. Is a list\n data['vt'] = {}\n try:\n if self.processedvtdata['resolutions'] != 'None':\n data['vt']['resolutions'] = []\n for count, resolution_tuple in enumerate(self.processedvtdata['resolutions']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = resolution_tuple[0]\n temp['domain'] = resolution_tuple[1]\n data['vt']['resolutions'].append(temp)\n except KeyError:\n pass\n\n # vt urls. Is a list\n try:\n if self.processedvtdata['detected_urls'] != 'None':\n data['vt']['detected_urls'] = []\n for count, url_tuple in enumerate(self.processedvtdata['detected_urls']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = url_tuple[0]\n temp['url'] = url_tuple[1][0]\n temp['detections'] = str(url_tuple[1][1]) + '/' + str(url_tuple[1][2])\n data['vt']['detected_urls'].append(temp)\n except KeyError:\n pass\n\n\n # vt detected communicating samples. Is a list\n try:\n if self.processedvtdata['detected_communicating_samples'] != 'None':\n data['vt']['detected_communicating_samples'] = []\n for count, communcating_tuple in enumerate(self.processedvtdata['detected_communicating_samples']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = communcating_tuple[0]\n temp['detections'] = str(communcating_tuple[1][0]) + '/' + str(communcating_tuple[1][1])\n temp['sha256'] = communcating_tuple[1][2]\n data['vt']['detected_communicating_samples'].append(temp)\n except AttributeError:\n pass\n\n # vt detected downloaded samples. Is a list\n try:\n if self.processedvtdata['detected_downloaded_samples'] != 'None':\n data['vt']['detected_downloaded_samples'] = []\n for count, detected_tuple in enumerate(self.processedvtdata['detected_downloaded_samples']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['date'] = detected_tuple[0]\n temp['detections'] = str(detected_tuple[1][0]) + '/' + str(detected_tuple[1][1])\n temp['sha256'] = detected_tuple[1][2]\n data['vt']['detected_downloaded_samples'].append(temp)\n except AttributeError:\n pass\n\n # vt referrer downloaded samples. 
Is a list\n try:\n if self.processedvtdata['detected_referrer_samples'] != 'None':\n data['vt']['detected_referrer_samples'] = []\n for count, referrer_tuple in enumerate(self.processedvtdata['detected_referrer_samples']):\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['sha256'] = referrer_tuple[0]\n temp['detections'] = str(referrer_tuple[1][0]) + '/' + str(referrer_tuple[1][1])\n data['vt']['detected_referrer_samples'].append(temp)\n except AttributeError:\n pass\n\n # pt data\n data['pt'] = {}\n if self.processedptdata:\n count = 0\n data['pt']['passive_dns'] = []\n for result in self.processedptdata_results:\n if count >= self.amount_to_print:\n break\n temp = {}\n temp['lastseen'] = result[0]\n temp['firstseen'] = result[1][0]\n temp['hostname'] = result[1][1]\n data['pt']['passive_dns'].append(temp)\n count += 1\n\n # shodan data\n try:\n if self.shodandata:\n data['shodan'] = self.shodandata\n except AttributeError:\n pass\n\n data = json.dumps(data)\n return data",
"def save_to_json(self):\r\n file = col.defaultdict(list)\r\n data_sources = [\"http://www.gcmap.com/\",\r\n \"http://www.theodora.com/country_digraphs.html\",\r\n \"http://www.citypopulation.de/world/Agglomerations.html\",\r\n \"http://www.mongabay.com/cities_urban_01.htm\",\r\n \"http://en.wikipedia.org/wiki/Urban_agglomeration\",\r\n \"http://www.worldtimezone.com/standard.html\"]\r\n file[\"data_sources\"] = data_sources\r\n for code, city in self.vertices.items():\r\n metros = {}\r\n for key, val in vars(city).items():\r\n metros[key] = val\r\n file[\"metros\"].append(metros)\r\n for code, _list in self.edges.items():\r\n for edge in _list:\r\n routes = {\"ports\": [edge.start, edge.destination], \"distance\": edge.distance}\r\n second_route = {\"ports\": [edge.destination, edge.start], \"distance\": edge.distance}\r\n if second_route not in file[\"routes\"]:\r\n file[\"routes\"].append(routes)\r\n with open('../Data/save.json', 'w') as outfile:\r\n json.dump(file, outfile, indent=4)",
"def run_json(self, plot_generator):\n\n # Run the script\n\n image_data, metadata = plot_generator.go()\n\n # Encode for http send\n encoded_image = base64.b64encode(image_data)\n\n # convert to json\n data = json.dumps({'data': encoded_image,\n 'metadata': metadata})\n \n\n\n\n # Write response\n self.wfile.write(data)",
"def json_out(self):\n temp_json = json.dumps(self.ecat_info, indent=4)\n print(temp_json)",
"def api_html():\n\n return jsonify({'version': __version__})",
"def to_json(self):\n template = {\n \"tensorName\": self.title,\n \"tensorShape\": list(self.vector_shape),\n \"tensorPath\": self.vector_url,\n \"metadataPath\": self.metadata_url,\n }\n if self.sprite_url is not None:\n template[\"sprite\"] = {\n \"imagePath\": self.sprite_url,\n \"singleImageDim\": list(self.image_size),\n }\n return template",
"def get_chart_two(request):\r\n json_str = []\r\n \r\n usuarios = Usuario.objects.all()\r\n for usuario in usuarios:\r\n peticiones = CasoPrueba.objects.filter(usuario=usuario)\r\n total = CasoPrueba.objects.all()\r\n json_str.append({ \r\n 'name': u'%s %s' % (usuario.persona.nombre,\r\n usuario.persona.apellidos),\r\n 'data': len(peticiones),\r\n 'total': len(total)\r\n }) \r\n json_obj = json.dumps(json_str, sort_keys=True, indent=4)\r\n response = HttpResponse(json_obj, mimetype=\"application/json\") \r\n return response",
"def format_json(self,query_results):\n results=query_results.data\n factory=factory_json()\n dump=factory.dumps(results)\n print(dump)\n # TODO return output for this\n return \"\"",
"def create_json_report(output):\n # Initial work, just dump mia_metrics and dummy_metrics into a json structure\n return json.dumps(output, cls=NumpyArrayEncoder)",
"def json_format(data):\n return {\n 'Title': data[\"title\"],\n 'Publication date': data['pubDate'],\n 'News link': data['link'],\n 'Image link': data['media'],\n }",
"def visualise():\n\n column = request.form.getlist('columnName')\n regions = request.form.getlist('raw_regions')\n #take the single string and return a list\n regions = query_proc.prep_regions(regions)\n #get that tables of interst\n table = query_proc.column_to_table(column)\n\n var_data = query_proc.get_region_data(table, column, regions)\n minval = query_proc.get_region_data_min(table, column, regions)\n maxval = query_proc.get_region_data_max(table, column, regions)\n\n #column diction to get human fiendly designation\n column_dict = name_column.get_name_column_dict()\n real_column = column_dict[column[0]]\n\n\n ##packing for the template\n region = regions[0]\n min_max = [minval, maxval]\n step = query_proc.calc_steps(min_max)\n min_max.append(step)\n\n min_max = json.dumps(min_max)\n json_vardata = json.dumps(var_data)\n\n return render_template('visualise.html',\n title='Data on a Map!',\n column=column,\n real_column=real_column,\n region=region,\n min_max=min_max,\n json_vardata=json_vardata)",
"def as_json(self):\n result = super().as_json()\n result[\"generator\"].update({\n \"block\": self.vein.with_purity(100).as_json(),\n \"cluster-size\": self.cluster_size,\n \"type\": \"cluster\",\n })\n return result",
"def render(self, data):\n logging.info(\"render (start)\")\n\n seria = json.dumps(data, ensure_ascii=False, indent=4)\n logging.info(\"rendered %s characters (end)\" % len(seria))\n return seria",
"def data_json(self, extra_context=None, publish=False):\n if not self.project.CREATE_JSON:\n # nothing to see here, but the right mimetype\n return jsonify()\n\n if not self.data:\n # this sets site.data by spreadsheet or gdoc\n self.get_context(publish)\n\n return jsonify(self.data)",
"def json(self):\n return {'id': self.id, 'name': self.name, 'description': self.description}",
"def do_json(pidx):\n status = \"200 OK\"\n if pidx == 0:\n name = f\"{BASEDIR}/scripts/__init__.py\"\n loader = importlib.machinery.SourceFileLoader(\"scripts\", name)\n spec = importlib.util.spec_from_loader(loader.name, loader)\n mod = importlib.util.module_from_spec(spec)\n loader.exec_module(mod)\n data = mod.data\n else:\n name = get_script_name(pidx)\n if not os.path.isfile(name):\n sys.stderr.write(f\"autoplot/meta 404 {name}\\n\")\n status = \"404 Not Found\"\n output = \"\"\n response_headers = [\n (\"Content-type\", \"application/json\"),\n (\"Content-Length\", str(len(output))),\n ]\n return output, status, response_headers\n try:\n timing = get_timing(pidx)\n except Exception:\n timing = -1\n loader = importlib.machinery.SourceFileLoader(f\"p{pidx}\", name)\n spec = importlib.util.spec_from_loader(loader.name, loader)\n mod = importlib.util.module_from_spec(spec)\n loader.exec_module(mod)\n data = mod.get_description()\n defaults = data.pop(\"defaults\", {\"_r\": \"t\", \"dpi\": \"100\"})\n data[\"maptable\"] = hasattr(mod, \"geojson\")\n data[\"highcharts\"] = hasattr(mod, \"highcharts\")\n data[\"timing[secs]\"] = timing\n\n # Setting to None disables\n if \"_r\" not in defaults or defaults[\"_r\"] is not None:\n data[\"arguments\"].append(\n dict(\n type=\"select\",\n options=FIGSIZES_NAMES,\n name=\"_r\",\n default=defaults.get(\"_r\", \"t\"),\n label=\"Image Pixel Size @100 DPI\",\n )\n )\n data[\"arguments\"].append(\n dict(\n type=\"int\",\n name=\"dpi\",\n default=defaults.get(\"dpi\", \"100\"),\n label=\"Image Resolution (DPI) (max 500)\",\n )\n )\n output = json.dumps(data)\n\n response_headers = [(\"Content-type\", \"application/json\")]\n return output, status, response_headers",
"def to_multiple_jsons(self):\n self.error_throw('output')\n\n if self.rank_method == methods_of_ranking[3]: #'diversified_ranking'\n self.output_div('multiple_jsons')\n else:\n self.output('multiple_jsons')",
"def export_verbose_json(self):\n self.export_json(verbosejson=True)",
"def GetJSON(self):\n return json.dumps(self.GetDict())",
"def graph():\n return jsonify(app.config[\"jsonified\"])",
"def json(self):\n d = OrderedDict()\n if self.id is not None:\n d[\"id\"] = str(self.id)\n d[\"status\"] = self.http_status\n d[\"title\"] = self.title\n if self.about:\n d[\"links\"] = OrderedDict()\n d[\"links\"][\"about\"] = self.about\n if self.code:\n d[\"code\"] = self.code\n if self.detail:\n d[\"detail\"] = self.detail\n if self.source_pointer or self.source_parameter:\n d[\"source\"] = OrderedDict()\n if self.source_pointer:\n d[\"source\"][\"pointer\"] = self.source_pointer\n if self.source_parameter:\n d[\"source\"][\"parameter\"] = self.source_parameter\n if self.meta:\n d[\"meta\"] = meta\n return d",
"def createJS(data):\n treeData = 'var data = ' + json.dumps(data[\"tree\"]) + ';'\n htmlFile = open(\"123456.html\",'w')\n jsFile = open(\"data.js\",'w')\n jsFile.write(treeData)\n jsFile.close()",
"def render_json(self, obj):\n self.response.content_type = \"application/json\"\n self.response.out.write(json.encode(obj))",
"def to_json(self, filename=None, format=None):\n if format == \"widget\":\n data = {\n \"nodes\": [n.to_json(format) for n in self.nodes],\n \"links\": [l.to_json(format) for l in self.links],\n \"order\": self.ordering.layers,\n \"groups\": self.groups,\n }\n else:\n data = {\n \"format\": \"sankey-v2\",\n \"metadata\": {\n \"title\": \"A Sankey diagram\",\n \"authors\": [],\n \"layers\": self.ordering.layers,\n },\n \"nodes\": [n.to_json(format) for n in self.nodes],\n \"links\": [l.to_json(format) for l in self.links],\n \"groups\": self.groups,\n }\n\n if filename is None:\n return data\n else:\n with open(filename, \"wt\") as f:\n json.dump(data, f)",
"def brief_json(self, absolutize_url):\n template = {}\n template.update({\n \"id\": self.image_id,\n \"links\": self.links_json(absolutize_url),\n \"name\": self.name\n })\n return template",
"def get_json(self):\n json_item = {\"id: \": self.id,\n \"question: \": self.question,\n \"documents: \": self.documents,\n \"document_ids: \": self.document_ids,\n \"gold answers: \": self.gold}\n return json_item",
"def generate(self, info):\n js_info = {}\n\n # Set target\n js_info[\"target\"] = info.target\n\n # Set time info\n js_info[\"start_time\"] = info.start_time.strftime(\"%H-%m-%Y %H:%M:%S\")\n js_info[\"end_time\"] = info.end_time.strftime(\"%H-%m-%Y %H:%M:%S\")\n\n # WordPress info\n js_info[\"wordpress\"] = {\n \"current_version\": info.wordpress_info.current_version,\n \"last_version\": info.wordpress_info.latest_version,\n \"outdated\": info.wordpress_info.is_outdated,\n \"cves\": [x for x in info.wordpress_info.vulnerabilities]\n }\n\n # Plugins info\n js_info[\"plugins\"] = []\n for plugin in info.plugins:\n\n json_plugin = {}\n json_plugin[\"plugin_name\"] = plugin.plugin_name\n\n json_plugin[\"current_version\"] = plugin.current_version\n json_plugin[\"last_version\"] = plugin.latest_version\n json_plugin[\"url\"] = plugin.plugin_uri\n json_plugin[\"outdated\"] = plugin.is_outdated\n\n # Set CVE\n json_plugin[\"cves\"] = [cve for cve in plugin.cves]\n\n # Set exploits\n json_plugin[\"exploits\"] = [exploit for exploit in plugin.exploits]\n\n js_info[\"plugins\"].append(json_plugin)\n\n return js_info",
"def getJSON(self):\n text = super().getJSON() + f', \"exchange\": \"{self.__exchange}\"'\n text += f', \"market pair\": \"{self.__market_pairs}\"'\n text += f', \"interval\": \"{self.__interval}\"}}'\n return text",
"def data():\n result = {}\n for thread in DATA.threads:\n result[thread] = [formatNode(node) for node in DATA.threads[thread].tree]\n return json.dumps({\n 'checkpoints': DATA.checkpoints,\n 'threads': result\n })",
"def output_json(data, code, headers=None):\n #data[\"timestamp\"] = datetime.now()\n return jsonify(data)",
"def to_json_string(self):\n\t\treturn json.dumps(dataclasses.asdict(self), indent=2, sort_keys=True) + \"\\n\"",
"def render_application_template(self):\n self.pipeline_config['instance_links'] = self.retrieve_instance_links()\n jsondata = get_template(\n template_file='infrastructure/app_data.json.j2', appinfo=self.appinfo, pipeline_config=self.pipeline_config)\n return jsondata",
"def index_json():\n\n response = views.get_feature_collection_metadata(config)\n\n return make_response(jsonify(response))",
"def ToJson(self):\n output = json.dumps(self.ToDictionary(), sort_keys=True, indent=4)\n return output",
"def to_single_json(self):\n self.error_throw('output')\n \n if self.rank_method == methods_of_ranking[3]: #'diversified_ranking'\n self.output_div('single_json')\n else:\n self.output('single_json')",
"def gen_json(self, show_headers=True, show_tags=True, use_objects=False):\n is_first = True\n yield \"[\\n\"\n if use_objects:\n for row in self:\n if is_first:\n is_first = False\n yield json.dumps(row.dictionary, sort_keys=True, indent=2)\n else:\n yield \",\\n\" + json.dumps(row.dictionary, sort_keys=True, indent=2)\n else:\n for raw in self.gen_raw(show_headers, show_tags):\n if is_first:\n is_first = False\n yield json.dumps(raw)\n else:\n yield \",\\n\" + json.dumps(raw)\n yield \"\\n]\\n\"",
"def __str__(self) -> str:\n obj_dict: Dict[str, Any] = {}\n obj_dict[\"doc\"] = self.doc\n obj_dict[\"type\"] = self.type\n obj_dict[\"name\"] = self.name\n\n line_range = self.line_range()\n obj_dict[\"start_line\"] = line_range[0]\n obj_dict[\"end_line\"] = line_range[1]\n\n obj_dict[\"children\"] = []\n\n for child in self.children.values():\n obj_dict[\"children\"].append(json.loads(str(child)))\n\n return json.dumps(obj_dict)",
"def to_json(self):\n capsule = {}\n capsule[\"Hierarchy\"] = []\n for (\n dying,\n (persistence, surviving, saddle),\n ) in self.merge_sequence.items():\n capsule[\"Hierarchy\"].append(\n {\n \"Dying\": dying,\n \"Persistence\": persistence,\n \"Surviving\": surviving,\n \"Saddle\": saddle,\n }\n )\n capsule[\"Partitions\"] = []\n base = np.array([None, None] * len(self.Y)).reshape(-1, 2)\n for (min_index, max_index), items in self.base_partitions.items():\n base[items, :] = [min_index, max_index]\n capsule[\"Partitions\"] = base.tolist()\n\n return json.dumps(capsule)",
"def json(self):\n beat = self.beat + 1.4 # replace with hjd\n w, h = self.getWidth(), self.getHeight()\n \n return {\n \"_time\": beat,\n \"_duration\": self.dur,\n #\"_lineIndex\": 0,\n #\"_type\": 0,\n #\"_width\": 0,\n \"_customData\": {\n # to undo the local rotation z transform we have to take trig parts of it and multiply them by the dimensions of the wall, then add them to the position\n \"_position\": [self.l + math.cos(math.radians(self.lrot[2] - 90)) * h / 2, self.d + math.sin(math.radians(self.lrot[2]-90)) * h / 2 + h / 2],\n \"_scale\": [w, h],\n \"_rotation\": self.rot,\n \"_localRotation\": self.lrot\n }\n }",
"def json_report(request):\n\n close_old_connections()\n\n # Not allow unauthenticated users.\n if not request.user.is_authenticated:\n return HttpResponseForbidden(\"You're not authenticated.\")\n \n # Get orders.\n orders = get_orders(request)\n\n # Form the audit.\n audit = {\n 'total_orders_amount': 0,\n 'total_orders_served_amount': 0\n }\n for order in orders:\n amount = order.count * order.dish.price\n if order.served:\n audit['total_orders_served_amount'] += amount\n audit['total_orders_amount'] += amount\n audit['total_amount_still_out'] = (\n audit['total_orders_amount'] - audit['total_orders_served_amount'])\n \n # Form the orders.\n orders = [\n {\n 'id': order.id,\n 'name': order.name,\n 'id_no': order.id_no,\n 'contact_no': order.contact_no,\n 'date': order.date,\n 'dish': {\n 'name': order.dish.name,\n 'id': order.dish.id,\n 'price': order.dish.price\n },\n 'count': order.count,\n 'amount': order.count * order.dish.price,\n 'served': order.served,\n 'ready': order.ready\n }\n for order in orders\n ]\n\n # Form the feedbacks.\n feedbacks = [\n {\n 'id': f.id,\n 'name': f.name,\n 'content': f.content,\n 'number': f.contact_no,\n 'date': f.date\n }\n for f in Feedback.objects.all().order_by('-date')\n ]\n\n close_old_connections()\n\n return JsonResponse({\n 'orders': orders,\n 'feedbacks': feedbacks,\n 'audit': audit\n })",
"def json(self):\n return {\n 'author': self.author,\n 'email': self.email,\n 'display_email': self.display_email,\n 'title': self.title,\n 'trailer_path': self.trailer_path,\n 'date': self.date,\n 'link': self.link,\n '_id': self._id\n }",
"def json_frapp(request):\n from pv.settings import MEDIA_URL\n\n if request.GET.get('date') == None:\n start = datetime.combine(date.today(), time(0, 0))\n else:\n start = datetime.combine( datetime.strptime(request.GET.get('date'), '%Y-%m-%d').date(), time(0, 0))\n\n end = datetime.combine(start, time(23, 59))\n\n timeslots = TimeSlot.objects.filter(start__gte=start,start__lte=end).select_related('show').order_by('start')\n\n\n '''Generate categories object for output'''\n\n categories = Category.objects.all()\n categories_output = []\n\n for c in categories:\n c_entry = {\n 'id': c.id,\n 'color': c.color.replace('#', '').upper(),\n 'namedisplay': c.category,\n 'description': c.description\n }\n\n categories_output.append(c_entry)\n\n # Get all series for timeslots\n series = set()\n for ts in timeslots:\n series.add(ts.show)\n\n\n '''Generate series object for output'''\n\n series_output = []\n\n for s in series:\n metainfos = []\n metainfos.append({ 'key': 'ProduzentIn', 'value': ', '.join(ts.show.hosts.values_list('name', flat=True)) })\n metainfos.append({ 'key': 'E-Mail', 'value': ', '.join(ts.show.hosts.values_list('email', flat=True)) })\n\n image = '' if s.image.name == None or s.image.name == '' else str(get_current_site(request)) + MEDIA_URL + s.image.name\n url = '' if s.website == None or s.website == '' else s.website\n\n # Get active schedules for the given date\n # But include upcoming single timeslots (with rrule_id=1)\n schedules = Schedule.objects.filter( Q(show=s.id,is_repetition=False) &\n (\n Q(rrule_id__gt=1,dstart__lte=start,until__gte=start) |\n Q(rrule_id=1,dstart__gte=start)\n )\n )\n\n schedules_repetition = Schedule.objects.filter( Q(show=s.id,is_repetition=True) &\n (\n Q(rrule_id__gt=1,dstart__lte=start,until__gte=start) |\n Q(rrule_id=1,dstart__gte=start)\n )\n )\n\n broadcastinfos = ''\n\n if not schedules.exists():\n continue\n\n for schedule in schedules:\n broadcastinfos = broadcastinfos + generate_frapp_broadcastinfos(schedule)\n\n if schedules_repetition.exists():\n broadcastinfos = broadcastinfos + 'Wiederholung jeweils:'\n for schedule in schedules_repetition:\n broadcastinfos = broadcastinfos + generate_frapp_broadcastinfos(schedule)\n\n s_entry = {\n 'id': s.id,\n 'categoryid': s.category.values_list('id', flat=True)[0],\n 'color': s.category.values_list('color', flat=True)[0].replace('#', '').upper(),\n 'namedisplay': s.name,\n 'description': s.description,\n 'url': url,\n 'image': image,\n 'broadcastinfos': broadcastinfos,\n 'metainfos': metainfos\n }\n\n series_output.append(s_entry)\n\n\n '''Generate shows object for output'''\n\n shows_output = []\n\n for ts in timeslots:\n\n is_repetition = ' ' + _('REP') if ts.schedule.is_repetition is 1 else ''\n namedisplay = ts.show.name + is_repetition\n description = ts.show.description\n url = str(get_current_site(request)) + '/shows/' + ts.show.slug\n urlmp3 = ''\n\n # If there's a note to the timeslot use its title, description and url\n try:\n note = Note.objects.get(timeslot=ts.id)\n namedisplay = note.title + is_repetition\n description = note.content\n url = str(get_current_site(request)) + '/notes/' + note.slug\n urlmp3 = note.audio_url\n except ObjectDoesNotExist:\n pass\n\n ts_entry = {\n 'id': ts.id,\n 'seriesid': ts.show.id,\n 'datetimestart': ts.start.strftime('%d.%m.%Y %H:%M:%S'),\n 'datetimeend': ts.end.strftime('%d.%m.%Y %H:%M:%S'),\n 'namedisplay': namedisplay,\n 'description': description,\n 'url': url,\n 'urlmp3': urlmp3,\n }\n\n shows_output.append(ts_entry)\n\n output = {}\n 
output['categories'] = categories_output\n output['series'] = series_output\n output['shows'] = shows_output\n\n return HttpResponse(json.dumps(output, ensure_ascii=False).encode('utf8'),\n content_type=\"application/json; charset=utf-8\")",
"def initialize_descriptive_json(json_filename,wk_dir,model_dir,obs_dir):\n output = {'provenance':{},'data':{},'metrics':{},'plots':{},'index': 'index.html','html':'index.html'}\n log_path = wk_dir + '/asop_coherence.log.txt'\n output['provenance'] = {'environment': get_env(),\n 'modeldata': model_dir,\n 'obsdata': obs_dir,\n 'log': log_path}\n with open(json_filename,'w') as output_json:\n json.dump(output,output_json, indent=2)\n\n return",
"def create_json(self, request, qs):\n\n j = Work.objects.get_dict(qs)\n\n response = JsonResponse(j, json_dumps_params={'indent': 4})\n name = '{}{}'.format(\n settings.PUBLISHER_CODE, datetime.now().toordinal())\n cd = 'attachment; filename=\"{}.json\"'.format(name)\n response['Content-Disposition'] = cd\n return response",
"def report_json(self):\n # type: () -> Optional[AnyStr]\n return json.dumps(self.gen_report(as_dict=True), indent=4)",
"def json(self):\n return {\n \"x\" : self.x,\n \"y\": self.y,\n \"rotation\":self.rotation,\n \"annotations\" : [a.json() for a in self.annotations]\n }",
"def make_jsons(self):\n self._jsons = [tree.to_json() for tree in self.reaction_trees]\n self._update_route_dict(self._jsons, \"json\")",
"def to_json(self):\n related = {'people': list(set([p.n for p in self.people.all()])),\n 'places': list(set([p.reg for p in self.places.all()])),\n 'organizations': list(set([o.n for o in self.orgs.all()])),\n 'keywords': list(set([k.reg for k in self.ref_strings.all()]))}\n return {'id': self.id,\n 'date': self.date,\n 'section': self.section_type,\n 'subsection': self.subsection_type,\n 'article_type': self.article_type,\n 'xpath': self.xpath,\n 'content': self.content,\n 'related': related}",
"def to_json(self):\r\n\r\n object_json = dict()\r\n object_json[\"Type\"] = self.__class__.__name__\r\n game_json = dict()\r\n game_json[\"x_dist\"] = self.x_dist\r\n game_json[\"y_dist\"] = self.y_dist\r\n game_json[\"turn_number\"] = self.turn_number\r\n game_json[\"max_turns\"] = self.max_turns\r\n game_json[\"num_to_win\"] = self.num_to_win\r\n game_json[\"winner\"] = self.winner\r\n game_json[\"board\"] = self.board.to_json()\r\n game_json[\"board_history\"] = [board.to_json() for board in self.board_history]\r\n game_json[\"players\"] = [player.to_json() for player in self.players]\r\n object_json[\"Object\"] = game_json\r\n\r\n return json.dumps(object_json)",
"def create_gen_json(self, out_file):\n\n params = self.create_package_dict()\n with open(out_file, 'w') as fp:\n json.dump(params, fp)",
"def to_json_string(self):\n\t\treturn json.dumps(self.to_dict(), indent=2, sort_keys=True) + \"\\n\"",
"def to_json(self):\n pass",
"def site_map(url):\n list_of_urls = []\n start_time = time.time()\n\n list_of_urls = get_urls(url, list_of_urls, url)\n\n result = get_titles_and_links(list_of_urls, url)\n\n # print(result)\n print(json.dumps(result, indent=2))\n print(\"--- %s seconds ---\" % (time.time() - start_time))\n\n with open(r'data.json', 'w') as outfile:\n json.dump(result, outfile)",
"def toJSON(self):\r\n\r\n jsonToRet = []\r\n rowJson = []\r\n matrixJson = []\r\n\r\n if len(self.slctData) > 100:\r\n self.getSimMatSummary(100)\r\n jsonToRet.append(self.summaryOrdering)\r\n for i in range(0,len(self.simMatSmm)):\r\n for n in self.simMatSmm[i]:\r\n rowJson.append(n)\r\n matrixJson.append(rowJson)\r\n rowJson = []\r\n jsonToRet.append(matrixJson)\r\n\r\n jsonToRet.append(self.patchOrdering)\r\n # jsonToRet = []\r\n rowJson = []\r\n matrixJson = []\r\n\r\n for i in range(0,len(self.simMat)):\r\n for n in self.simMat[i]:\r\n rowJson.append(n)\r\n matrixJson.append(rowJson)\r\n rowJson = []\r\n jsonToRet.append(matrixJson)\r\n return jsonToRet",
"def get_json():\n data_list = []\n\n for page in range(1,13):\n url = BASE_URL + STYLE_URL + \"&\" + PAGINATION_URL + str(page)\n print(page, \"pages processed\")\n try:\n response = requests.get(url, timeout=METADATA_REQUEST_TIMEOUT)\n data = response.json()['Paintings']\n parse_data(data_list, data)\n except requests.exceptions.RequestException as e:\n print(e)\n sys.exit(1)\n\n return data_list",
"def index():\n\n return jsonify()",
"def convertToJson(self, article):\n json_obj = {}\n for sec in article.children('div.section'):\n sec('div.section')\n # TODO: Format each section\n return json_obj",
"def summaryJSON(self, filename=None):\n d = self.robotGridSummaryDict()\n if filename is not None:\n with open(filename, \"w\") as f:\n json.dump(d, f, separators=(',', ':'))\n else:\n return json.dumps(d)",
"def dict(self):\n\t\treturn self.json",
"def json(self):\n\n json = {}\n json['type'] = self.type\n json['value'] = self.value\n json['status'] = self.status\n json['tags'] = list(set(self.tags))\n json['relationships'] = list(set(self.relationships))\n json['whitelisted'] = self.whitelisted\n json['path'] = self.path\n\n return json",
"def make_dict(self):\n return self.generate_widgets()",
"def _repr_html_(self):\n params = OrderedDict()\n params[\"Name\"] = self.name\n params[\"Description\"] = self.description\n params[\"Ns\"] = self.Ns\n params[\"Ni\"] = self.Ni\n params[\"Kinetic Parameter\"] = self.kinetic_parameter_type\n params[\"Kinetic Parameter Value\"] = self.kinetic_parameter_value \n \n header = \"<table>\"\n footer = \"</table>\"\n html = \"\"\n\n for key, val in params.items():\n html += \"<tr><td>{0}</td><td>{1}</td></tr>\".format(key, val)\n\n return header + html + footer",
"def generate_visualization_dict(self):\n self._data = {}\n self._data['name'] = self.name\n self._data['type'] = self.__repr__()\n self._data['color'] = self._color_rgb\n\n try:\n self._data['simulation_matrix'] = \\\n self._visualization_matrix.tolist()\n\n except:\n #Not sure which error to call here.\n raise RuntimeError('''Please call the numerical\n transformation methods,\n before generating simulation dict ''')\n\n\n return self._data",
"def make_modified_json(num_urls):\n json_path = os.path.join(CHROMIUM_SRC, 'tools/perf/page_sets/data/')\n json_template = ('{{'\n '\"description\": \"Describes the Web Page Replay archives for a user '\n 'story set. Dont edit by hand! Use record_wpr for updating.\", '\n '\"archives\": {{ \"url{0}_page_set_000.wpr\": [\"url{0}\"]}}}}\\n')\n for i in range(num_urls):\n modified_name = str(i) + '_pc'\n file_name = 'url{0}_page_set.json'\n with open('{0}{1}'.format(json_path,\n file_name.format(modified_name)), 'wb') as f:\n f.write(json_template.format(modified_name))",
"def generate_json_network_reports(init_date, last_date):\n\n report = network_report_for_carrier(init_date, last_date)\n\n save_json_report_to_file(report, init_date.year, init_date.month,\n \"network_report_\")",
"def to_json_string(self):\n return json.dumps(self.to_dict(), indent = 2, sort_keys = True) + \"\\n\"",
"def to_json(self):\n return [\"population\", self.species_index, self.card_trade_index]",
"def as_json(self):\n\n return {\n \"name\": self.name,\n \"summary\": self.summary.as_json(),\n \"cases\": [case.as_json() for case in self.cases]\n }",
"def to_dict(self):\n# \"\"\" The JSON model used is like:\n# <code>\n#{\n# \"duration\": 15,\n# \"url\": \"url1\",\n# \"selections\": [{\n# \"annotations\": [{\n# \"author\": \"\",\n# \"description\": \"speaker\",\n# \"keyword\": \"john\",\n# \"lang\": \"EN\"\n# },\n# {\n# \"author\": \"\",\n# \"description\": \"speakerLabel\",\n# \"keyword\": \"S0\",\n# \"lang\": \"EN\"\n# }\n# , {\n# \"author\": \"\",\n# \"description\": \"gender\",\n# \"keyword\": \"F\",\n# \"lang\": \"EN\" \n# }],\n# \"resolution\": \"0x0\",\n# \"selW\": 20,\n# \"selH\": 15,\n# \"selY\": 10,\n# \"selX\": 10,\n# \"startTime\" : 0,\n# \"endTime\" : 10\n# \n# }]\n#}\n# </code>\n# \n# \"\"\"\n\n dic = {\"duration\": self.get_duration(),\n \"url\": self._filename,\n \"db\":self.get_db().get_path(),\n \"selections\": [] }\n for seg in self.get_time_slices():\n dic['selections'].append({\n \"startTime\": float(seg[0]) / 100.0,\n \"endTime\": float(seg[1]) / 100.0,\n 'speaker': seg[-2],\n 'speakerLabel': seg[-1],\n 'gender': seg[2],\n 'speakers': seg[3]\n })\n return dic",
"def print_json(results):\r\n import json\r\n stats = calc_stats(results)\r\n print(json.dumps(stats._asdict()))",
"def json(self):\n return {'User_uuid': self.uuid, 'School_id': self.school_id, 'Earned_points': self.us_dollar}",
"def to_json_string(self):\n return json.dumps(self.to_dict(), indent=2, sort_keys=True) + \"\\n\"",
"def get_json(self):\n return {'name': self.name, \n 'path': self.path, \n 'enabled': self.enabled}",
"def japaneseGraph():\n japaneseVocabularyTrajectory = JapaneseVocabularyTrajectory.query.all()\n data = []\n for row in japaneseVocabularyTrajectory:\n data.append([row.date.strftime(\"%Y/%m/%d\"),row.number])\n return json.dumps(data)",
"def export_json_graph(self, destpath):\n export = {}\n export['vertices'] = self.vertices\n export['edges'] = self.edges\n export['_totals'] = {}\n export['_photo'] = {}\n export['_photo']['credit'] = self.photo['credit']\n export['_photo']['entity_max'] = self.photo['max']\n export['_totals']['media'] = len(self.media)\n export['_totals']['wilds'] = len(self.wilds)\n export['_totals']['zoos'] = len(self.zoos)\n export['_totals']['locations'] = len(self.wilds) + len(self.zoos)\n export['_totals']['pandas'] = self.sum_pandas()\n export['_totals']['last_born'] = self.summary['birthday']\n export['_totals']['last_died'] = self.summary['death']\n with open(destpath, 'wb') as wfh:\n wfh.write(json.dumps(export, \n ensure_ascii=False,\n indent=4,\n sort_keys=True).encode('utf8'))\n print(\"Dataset exported: %d pandas at %d locations (%d wild, %d zoo)\"\n % (export['_totals']['pandas'], export['_totals']['locations'],\n export['_totals']['wilds'], export['_totals']['zoos']))",
"def generateHtml(self):\n # only the master processor needs to do this\n if not self.master: return\n\n for page in self.layout.pages:\n \n # build the metric dictionary\n metrics = {}\n page.models = []\n for fname in glob.glob(os.path.join(self.output_path,\"*.nc\")):\n with Dataset(fname) as dataset:\n mname = dataset.getncattr(\"name\")\n if mname != \"Benchmark\": page.models.append(mname)\n if not dataset.groups.has_key(page.name): continue\n group = dataset.groups[page.name]\n\n # if the dataset opens, we need to add the model (table row)\n metrics[mname] = {}\n \n # each model will need to have all regions\n for region in self.regions: metrics[mname][region] = {}\n \n # columns in the table will be in the scalars group\n if not group.groups.has_key(\"scalars\"): continue\n \n # we add scalars to the model/region based on the region\n # name being in the variable name. If no region is found,\n # we assume it is the global region.\n grp = group.groups[\"scalars\"]\n for vname in grp.variables.keys():\n found = False\n for region in self.regions:\n if region in vname: \n found = True\n var = grp.variables[vname]\n name = vname.replace(region,\"\")\n metrics[mname][region][name] = Variable(name = name,\n unit = var.units,\n data = var[...])\n if not found:\n var = grp.variables[vname]\n metrics[mname][\"global\"][vname] = Variable(name = vname,\n unit = var.units,\n data = var[...])\n page.setMetrics(metrics)\n \n # write the HTML page\n f = file(os.path.join(self.output_path,\"%s.html\" % (self.name)),\"w\")\n f.write(str(self.layout))\n f.close()",
"def json(self):\n return {\n 'uri': self.view_uri,\n 'created': time.strftime('%c', time.gmtime(self.created)),\n 'created_timestamp': self.created,\n 'exception_type': str(self.exc_type),\n 'exception': str(self.exc_value),\n }",
"def toJSON(self):\n raise NotImplementedError()",
"def display_json(self, results, verbose):\n print(json.dumps(results))"
] | [
"0.6761482",
"0.67505777",
"0.63986033",
"0.6352996",
"0.6321673",
"0.63178545",
"0.62238723",
"0.62133825",
"0.6212409",
"0.6167629",
"0.6154408",
"0.6149698",
"0.612473",
"0.61238694",
"0.6115323",
"0.60985136",
"0.6090451",
"0.60752165",
"0.6065477",
"0.606487",
"0.6025067",
"0.6024606",
"0.6006279",
"0.5997201",
"0.5969014",
"0.5954983",
"0.5954588",
"0.5951506",
"0.5947118",
"0.59392744",
"0.5927195",
"0.5924854",
"0.59223783",
"0.5921754",
"0.59137994",
"0.5909943",
"0.58935606",
"0.58903825",
"0.5886294",
"0.58756053",
"0.5870497",
"0.58516365",
"0.5845435",
"0.58340883",
"0.5833091",
"0.58308387",
"0.58234715",
"0.58160985",
"0.5811209",
"0.58111346",
"0.58077925",
"0.5800023",
"0.5794135",
"0.57804334",
"0.57716477",
"0.5767536",
"0.5766405",
"0.5764146",
"0.5761382",
"0.57597995",
"0.57594985",
"0.5759306",
"0.57373303",
"0.573639",
"0.573195",
"0.57293826",
"0.5724654",
"0.57196236",
"0.57188576",
"0.57019734",
"0.5695597",
"0.5692235",
"0.56872666",
"0.5680201",
"0.56800544",
"0.56743705",
"0.5672552",
"0.56725127",
"0.5663788",
"0.5663566",
"0.56526303",
"0.5651116",
"0.56498665",
"0.5644909",
"0.56409657",
"0.5639777",
"0.56342095",
"0.5633862",
"0.5627299",
"0.5622155",
"0.5621808",
"0.5619716",
"0.56187135",
"0.5608415",
"0.5608091",
"0.5605178",
"0.55999106",
"0.5597321",
"0.5594992",
"0.55922663",
"0.5583844"
] | 0.0 | -1 |
convert to open airspace format | def make_open_airspace_format(self):
# Extract coordinates from KML
for idxline in range(len(self.kml_lines)):
if '<name>' in self.kml_lines[idxline]:
self.name = self.kml_lines[idxline].replace('\t', '').replace('<name>', '').replace('</name>', '').replace('\n','')
if not self.name.startswith('TS'):
self.name = 'TS_' + self.name
print('Type: %s | Name: %s' % (self.as_type, self.name))
if '<coordinates>' in self.kml_lines[idxline]:
self.coordinates_kml = self.kml_lines[idxline + 1].replace('\t', '').replace('\n', '')
break
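    # self.coordinates_kml now holds the raw KML coordinate string:
    # space-separated 'lon,lat,alt' triples (KML puts longitude first)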
    # Start conversion to OpenAir format; the block below shows the target layout:
""" AC A
AN TS_Erzgeb
AL FL98
AH FL99
DP 50:26:22 N 012:17:59 E
DP 50:25:25 N 012:18:26 E
DP 50:24:40 N 012:19:01 E
DP 50:24:06 N 012:19:46 E"""
# AC A
self.txt_lines.append('AC %s\n' % self.as_type)
# AN TS_Erzgeb
self.txt_lines.append('AN %s\n' % self.name)
    # altitude limits (hard-coded flight levels)
self.txt_lines.append('AL FL98\n')
self.txt_lines.append('AH FL99\n')
# coordinates
for coo_pt in self.coordinates_kml.split(' ')[:-1]:
# Target format: DP 50:26:22 N 012:17:59 E
lat_long = coo_pt.split(',')
# latitude
        latDecAsStr = lat_long[1].split('.')
        lat_degree = abs(int(latDecAsStr[0]))
        # handle plain-integer values such as "51" (no decimal part)
        if len(latDecAsStr) == 1:
            latDecAsStr.append('0')
        lat_frac_minutes = float('0.' + latDecAsStr[1]) * 60
        lat_minute = int(lat_frac_minutes)
        lat_second = round((lat_frac_minutes - lat_minute) * 60)
        # rounding can push seconds to 60; carry into minutes and degrees
        if lat_second == 60:
            lat_second = 0
            lat_minute += 1
        if lat_minute == 60:
            lat_minute = 0
            lat_degree += 1
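        # illustrative check (assumed input): '50.439444' -> 50 deg, 26 min,
        # 22 sec, i.e. the '50:26:22' of the sample block above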
        cooString = 'DP %02d:%02d:%02d' % (lat_degree, lat_minute, lat_second)
if latDecAsStr[0].startswith('-'):
cooString += ' S'
else:
cooString += ' N'
        # longitude
        # handle plain-integer values such as "11" (no decimal point)
        if '.' not in lat_long[0]:
            lat_long[0] += '.0'
        lonDecAsStr = lat_long[0].split('.')
        lon_degree = abs(int(lonDecAsStr[0]))
        lon_frac_minutes = float('0.' + lonDecAsStr[1]) * 60
        lon_minute = int(lon_frac_minutes)
        lon_second = round((lon_frac_minutes - lon_minute) * 60)
        # rounding can push seconds to 60; carry into minutes and degrees
        if lon_second == 60:
            lon_second = 0
            lon_minute += 1
        if lon_minute == 60:
            lon_minute = 0
            lon_degree += 1
cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))
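        # illustrative check (assumed input): '12.299722' -> '012:17:59';
        # longitude degrees are zero-padded to three digits, latitude to two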
if lonDecAsStr[0].startswith('-'):
cooString += ' W'
else:
cooString += ' E'
cooString += '\n'
self.txt_lines.append(cooString) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def make_json_airspace_format(self):\n # The previous fct make_open_airspace_format already stored, coordinates_kml, name and type\n # This data is collected in an dictionary, which then is stored as json.\n # initialize dict\n coordinates_as_list_of_floats = []\n # run through coordinates\n coordinates_as_list_of_floats = []\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n lat_long = coo_pt.split(',')\n coordinates_as_list_of_floats.append([float(lat_long[1]), float(lat_long[0])])\n # make json dict\n # rename name if not thermal space\n if self.name.startswith('TS_') and not (self.as_type == 'A' or self.as_type == 'B'):\n name_for_json = self.name[3:]\n else:\n name_for_json = self.name\n # rename airspace type for json:\n if self.as_type == 'A':\n self.as_type = 'Good_thermals'\n if self.as_type == 'B':\n self.as_type = 'Bad_thermals'\n self.json_dict = {\"AL\": \"FL98\", \"AH\": \"FL99\", \"AC\": self.as_type, \"AN\": name_for_json, \"data\": coordinates_as_list_of_floats}",
"def open_airspace_format_2_kml(self, source_file_txt):\n # load template for kml file\n self.load_kml_template(self.full_path_kml_template)\n # load airspace source\n self.load_airspace_open_air_format(source_file_txt)\n\n self.kml_lines = self.kml_template['header']\n self.kml_lines.extend(self.kml_template['good_subdivided']['head'])\n # collect all A and B kml lines\n kml_A = []\n kml_B = []\n # transform airspaces and attach to A and B collect-lists\n for airspace in self.airspaces:\n airspace.make_kml_format(self.kml_template)\n if airspace.as_type == 'A':\n kml_A.extend(airspace.kml_lines)\n if airspace.as_type == 'B':\n kml_B.extend(airspace.kml_lines)\n\n self.kml_lines.extend(kml_A)\n self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])\n # start B part\n self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])\n self.kml_lines.extend(kml_B)\n self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])\n\n full_path_kml = source_file_txt[:-4] + '_converted.kml'\n # uisave dialog\n full_path_kml = filesavebox(default=full_path_kml, filetypes=\"*.kml\")\n if full_path_kml is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n # write to file\n f = open(full_path_kml, 'w')\n f.writelines(self.kml_lines)\n f.close()\n print('Resulting KML files was saved to: %s' % full_path_kml)",
"def kml_2_open_airspace_and_json_format(self, full_path):\n # read file\n f = open(full_path,'r')\n kml = f.readlines()\n f.close()\n # find airspaces\n \"\"\"Placemark >\n < name > Bremen - Blumenthal\n Thermikplatte < / name >\n < styleUrl > # inline10</styleUrl>\n < Polygon >\n < tessellate > 1 < / tessellate >\n < outerBoundaryIs >\n < LinearRing >\n < coordinates >\n 8.529121049900063, 53.19549566929423, 0\n 8.52324583919868, 53.21131939607898, 0\n 8.545439298799483, 53.23055800702935, 0\n 8.588991466114615, 53.23047069814625, 0\n 8.575289966189502, 53.20745451706468, 0\n 8.560633120477348, 53.19724609335408, 0\n 8.529121049900063, 53.19549566929423, 0\n < / coordinates >\n \n < / LinearRing >\n < / outerBoundaryIs >\n < / Polygon >\n < / Placemark >\"\"\"\n container = []\n idxLine = 0\n did_not_pass_main_folder = True\n list_of_airspace_types_included = []\n while idxLine < len(kml):\n #print(kml[idxLine])\n #if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:\n # # we have to jump over the first folder\n # print(f'Reading everything inside folder: {kml[idxLine]}')\n # did_not_pass_main_folder = False\n if '<Folder>' in kml[idxLine]: # begin of airspace\n as_type = kml[idxLine+1].replace('\\t','').replace('<name>','').replace('</name>\\n','') # <name>B</name>\n print('Reading AS-types: ' + as_type)\n list_of_airspace_types_included.append(as_type)\n #if not (as_type == 'A' or as_type == 'B'):\n # print('#### Check Folder / Airspace Types, must be \"A\" or \"B\" and try again (current %s)' % as_type)\n # msgbox('Check Folder / Airspace Types, are not \"A\" or \"B\" (current %s). Airspace E will be used for export.' % as_type)\n # as_type = 'E'\n\n if '<Placemark' in kml[idxLine]: # begin of airspace\n container = []\n if '</Placemark' in kml[idxLine]: # end of airspace\n # make sure only Polygons are stored\n for as_line in container:\n if '<Polygon>' in as_line:\n idx_lookAt_start = None\n for idx, line_of_container in enumerate(container):\n if \"<LookAt>\" in line_of_container:\n idx_lookAt_start = idx\n if \"</LookAt>\" in line_of_container:\n idx_lookAt_end = idx\n # Remove lookAt lines if necessary\n if idx_lookAt_start:\n container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part\n # append airspace to airspace list as airspace class\n self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))\n container.append(kml[idxLine])\n idxLine += 1\n print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))\n # summary\n outlines = ['* KML conversion file, rename this line']\n json_dict = {\"circles\": [], \"polygons\": []}\n for airspace in self.airspaces:\n # prepare open-airspace formate\n outlines.append('\\n\\n') # separate airspaces\n outlines.extend(airspace.txt_lines)\n # prepare json\n json_dict['polygons'].append(airspace.json_dict)\n\n # write open airspace format\n target_path = full_path[:-4] + '_converted.txt'\n # uisave dialog\n\n target_path = filesavebox(default=target_path, filetypes=\"*.txt\")\n if target_path is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n f = open(target_path,'w')\n f.writelines(outlines)\n f.close()\n print('Result was written to: %s' % target_path)\n\n # write json:\n target_path_json = target_path[:-4] + '.json'\n\n json_string = json.dumps(json_dict)\n json_file = open(target_path_json, \"w\")\n json_file.write(json_string)\n json_file.close()\n\n # write list of airspace files for index.html for leaflet map\n print('The 
following airspace types have been converted:')\n print(list_of_airspace_types_included)",
"def airports(osm_path): \n return (retrieve(osm_path,'multipolygons',['aeroway'],**{'aeroway':[\"='aerodrome'\"]})).rename(columns={'aeroway': 'asset'})",
"def marshall(self):\n try:\n data = [\"x02\"] #start token\n data.extend(ac.getCarState(0, acsys.CS.CurrentTyresCoreTemp)) #0-3 - Core tyre temperatures, Degrees celcius\n data.extend(info.physics.tyreWear) #4-7 #tyre wear\n data.extend(ac.getCarState(0, acsys.CS.DynamicPressure)) #8-11 pressure of each tyre in PSI\n data.extend(ac.getCarState(0, acsys.CS.TyreDirtyLevel)) #12-15 amount of dirt on each tyre\n data.append(ac.getCarState(0, acsys.CS.SpeedMS)) #16 speed in metres/sec\n data.append(ac.getCarState(0, acsys.CS.Gear)) #17 gear number\n data.append(ac.getCarState(0, acsys.CS.BestLap)) #18 best lap time in ms\n data.append(ac.getCarState(0, acsys.CS.RPM)) #19 rpm\n data.append(ac.getCarState(0, acsys.CS.LapCount)) #20 lap count\n data.append(ac.getCarState(0, acsys.CS.LapInvalidated)) #21 is lap invalid? 0-no, 1-yes\n data.append(ac.getCarState(0, acsys.CS.LapTime)) #22 current lap time in ms\n data.append(ac.getCarState(0, acsys.CS.LastLap)) #23 last lap in ms\n data.append(ac.getCarState(0, acsys.CS.PerformanceMeter)) #24 delta time in ms from best lap?? (haven't checked)\n data.append(ac.getCarState(0, acsys.CS.Steer)) #25 steering rotation in radians\n data.append(ac.getCarName(0)) #26 name of car being driven by player\n data.append(ac.getTrackName(0)) #27 track name\n\n data.append(\"x04\") #end token\n except Exception as e:\n ac.console(\"{}\".format(e))\n return \",\".join(str(v) for v in data).encode()",
"def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)",
"def igra2_ascii_to_dataframe(file=''):\n if debug:\n print(\"Running igra2_ascii_to_dataframe for: \", file) \n \n data = check_read_file(file=file, read=True)\n #source_file = [l for l in file.split('/') if '.txt' in l][0]\n read_data = [] # Lists containing the raw data from the ascii file, and the observation dates\n \"\"\" Data to be extracted and stored from the igra2 station files \n Some info is contained in the header of each ascent, some in the following data \"\"\"\n\n \"\"\" Initialize the variables that can be read from the igra2 files \"\"\"\n ident,year,month,day,hour,reltime,p_src,np_src,lat, lon = np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan \n lvltyp1,lvltyp2,etime,press,pflag,gph,zflag,temp,tflag,rh,dpdep,wdir,wspd = np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan # initialize to zeros\n stations_id = []\n idate = np.nan\n count = 0\n head_count = 0\n \n obs_id = 0\n \n def make_release_time(date_time, hour, release):\n \"\"\" build a sonde release time \n ex 2019 02 20 00 2349 \n ex 2019 01 10 00 0011 \n They round the release time to the closest hour. \n It can be the same day or the following !!!\n date_time = date_time pytohn object, \n date, time, release = original strings \n \"\"\"\n release_h = int(release[:2])\n release_m = int(release[2:4])\n \n if release_h == 99:\n return 0 #largest integer number int 64 \n \n else:\n if release_m == 99:\n release_m = 0\n release_date_time = date_time.replace(hour= release_h, minute= release_m) \n \n \"\"\" Here, I have to subtract one day to the release time stamp if the hour of the time stamp is in th evening,\n but the nominal time is reported at midnight hence in the following day. For example 2019 02 20 00 2349 from file VMM00048820 \"\"\"\n if hour == '00':\n if release_h > 20:\n release_date_time = release_date_time - timedelta(days=1)\n else:\n pass\n \n return release_date_time \n \n \n for i, line in enumerate(data):\n if line[0] == '#':\n head_count = head_count +1 \n # Info from the Header line of each ascent \n ident = line[1:12] # station identifier\n ident = ident[6:12]\n if ident not in stations_id:\n stations_id.append(ident)\n \n year = line[13:17] # year, months, day, hour of the observation\n month = line[18:20]\n day = line[21:23]\n hour = line[24:26] \n reltime = line[27:31] # release time of the sounding.\n numlev = int(line[32:36]) # number of levels in the sounding == number of data recorded in the ascent\n p_src = line[37:45] # data source code for the pressure levels \n np_src = line[46:54] # data source code for non-pressure levels\n lat = int(line[55:62]) / 10000. 
# latitude and longitude\n lon = int(line[63:71]) / 10000.\n #observation_id = i\n if int(hour) == 99:\n time = reltime + '00'\n else:\n time = hour + '0000'\n \n if '99' in time:\n time = time.replace('99', '00')\n\n idate = datetime.strptime(year + month + day + time, '%Y%m%d%H%M%S') # constructed according to CDM\n \n release_time = make_release_time(idate, hour, reltime) # making the release time \n \n \n iday = int(year + month + day)\n count = count + 1\n else:\n # Data of each ascent\n lvltyp1 = int(line[0]) # 1- 1 integer major level type indicator\n lvltyp2 = int(line[1]) # 2- 2 integer minor level type indicator\n etime = int(line[3:8]) # 4- 8 integer elapsed time since launch\n press = int(line[9:15]) # 10- 15 integer reported pressure\n \n if press == -9999:\n press = np.nan\n \n pflag = line[15] # 16- 16 character pressure processing flag\n \n gph = int(line[16:21]) # 17- 21 integer geopotential height [m]\n \n if gph == -9999 or gph == -8888: # reading the values andh check if they are missing or removed as -9999 or -8888 before dividing by 10 as the instructions say \n gph = np.nan # 23- 27 integer temperature, [Celsius to Kelvin ] \n \n zflag = line[21] # 22- 22 character gph processing flag, \n \n temp = int(line[22:27]) \n if temp != -9999 and temp != -8888: # reading the values andh check if they are missing or removed as -9999 or -8888 before dividing by 10 as the instructions say \n temp = temp / 10. + 273.15 # 23- 27 integer temperature, [Celsius to Kelvin ] \n else:\n temp = np.nan \n \n tflag = line[27] # 28- 28 character temperature processing flag\n \n rh = int(line[28:33]) # 30- 34 integer relative humidity [%] \n if rh != -8888 and rh != -9999:\n rh = rh / 1000. # converting from percentage to absolute ratio \n else:\n rh = np.nan\n \n dpdp = int(line[34:39]) \n if dpdp != -9999 and dpdp !=-8888: \n dpdp = dpdp / 10. # 36- 40 integer dew point depression (degrees to tenth e.g. 11=1.1 C) \n else:\n dpdp = np.nan \n \n wdir = int(line[40:45]) # 41- 45 integer wind direction (degrees from north, 90 = east)\n if wdir == -8888 or wdir == -9999 :\n wdir = np.nan \n \n wspd = int(line[46:51]) # 47- 51 integer wind speed (meters per second to tenths, e.g. 11 = 1.1 m/s [m/s]\n if wspd != -8888 and wspd != -9999 :\n wspd = wspd / 10. 
\n else:\n wspd = np.nan \n if reltime == 9999.0:\n reltime = np.nan \n \n z_type = np.nan\n if not (np.isnan(press)):\n z_type = 1\n elif (np.isnan(press) and not np.isnan(gph) ) :\n z_type = 2 \n \n for value,var in zip([gph, temp, wspd, wdir, rh, dpdp], ['gph', 'temperature', 'wind_speed', 'wind_direction', 'relative_humidity' , 'dew_point_depression'] ):\n obs_id = obs_id +1 \n if not np.isnan(press): # when pressure is available, z_coord== pressure and z_type==1 \n z_type = 1 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, press, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n elif (np.isnan(press) and not np.isnan(gph) ) : # when pressure is not available, z_coord== gph and z_type==2 \n z_type = 2 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, gph, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n else:\n z_type = -2147483648 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, press, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n\n\n df = pd.DataFrame(data= read_data, columns= column_names_igra2)\n \n df['observation_id'] = np.chararray.zfill( (df['observation_id'].astype(int)) .astype('S'+str(id_string_length ) ), id_string_length ) #converting to fixed length bite objects \n df['report_id'] = np.chararray.zfill( (df['report_id'].astype(int)).astype ('S'+str(id_string_length ) ), id_string_length )\n \n df = df.replace([-999.9, -9999, -999, -999.0, -99999.0, -99999.9, 99999.0, -99999.00 ], np.nan)\n \n df = df.sort_values(by = ['record_timestamp', 'vertco_reference_1@body' ] ) # FF check here !!!! \n \n return df, stations_id",
"def alom():\n #\n # this is the alpha\n inlist = list(\"begin\") # change data into a list element\n outlist[0:5] = inlist # place data in the list in the correct place\n # print(\"\".join(outlist)) # see result\n #\n # this is the omega\n inlist = list(\"end\")\n #\n # change data into a list element\n outlist[1247:1250] = inlist # place data in the list in the correct place\n outstr = \"\".join(outlist)\n print(outstr)\n print(len(outstr))\n # of = open(\"workfile\", \"w\")\n # of.write(outstr)",
"def isochrone_to_aa(*args, **kwargs):\n return isochrone_xv_to_aa(*args, **kwargs)",
"def __init__(self, full_path_of_source=''):\n if len(full_path_of_source) == 0:\n full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=[\"*.txt\", \"*.kml\"])\n if full_path_of_source is None:\n print('Airspace conversion was aborted by the user')\n quit()\n # set template (this should not be changed)\n self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named \"good\" and \"bad\"\n\n self.airspaces = [] # airspace container\n self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template\n 'good_subdivided': {'head':[], 'placemark': [], 'tail': []},\n 'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}\n self.txt_lines = [] # airspace file in open airspace format\n self.kml_lines = [] # airspace file in kml format\n \"\"\" handle conversion from and to KML / airspace format\"\"\"\n if full_path_of_source.lower().endswith('.kml'):\n self.kml_2_open_airspace_and_json_format(full_path_of_source)\n if full_path_of_source.lower().endswith('.txt'):\n self.open_airspace_format_2_kml(full_path_of_source)\n self.plot_all() # works for now only for TXT input",
"def __init__(self):\n self.__deviceselected__ = \"SR-DMS4AP{LOCALBUMP}DEV:Sel-SP\"\n self.__source__ = \"SR-DMS4AP{LOCALBUMP}S-SP\"\n self.__plane__ = \"SR-DMS4AP{LOCALBUMP}PLANE-SP\"\n #self.__xshift__ = \"SR-DMS4AP{LOCALBUMP}SHIFT:X-SP\"\n #self.__yshift__ = \"SR-DMS4AP{LOCALBUMP}SHIFT:Y-SP\"\n #self.__xangle__ = \"SR-DMS4AP{LOCALBUMP}ANGLE:X-SP\"\n #self.__yangle__ = \"SR-DMS4AP{LOCALBUMP}ANGLE:Y-SP\"\n self.__shift__ = \"SR-DMS4AP{LOCALBUMP}SHIFT-SP\"\n self.__angle__ = \"SR-DMS4AP{LOCALBUMP}ANGLE-SP\"\n # with all offsets\n self.__anglerb__ = \"SR-DMS4AP{LOCALBUMP}ANGLE-I\"\n self.__positionrb__ = \"SR-DMS4AP{LOCALBUMP}POS-I\"\n # with BBA offset only\n self.__anglerb0__ = \"SR-DMS4AP{LOCALBUMP}ANGLE:BBA-I\"\n self.__positionrb0__ = \"SR-DMS4AP{LOCALBUMP}POS:BBA-I\"\n\n self.__bpmposition__ = \"SR-DMS4AP{LOCALBUMP:BPM}Pos-I\"\n self.__bpmorbitx__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:X-I\"\n self.__bpmorbity__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:Y-I\"\n self.__bpmorbitx0__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:X0-I\"\n self.__bpmorbity0__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:Y0-I\"\n\n self.__correctorposition__ = \"SR-DMS4AP{LOCALBUMP:COR}Pos-I\"\n self.__hcorrectorcurrent__ = \"SR-DMS4AP{LOCALBUMP:HCOR}PS-SP\"\n self.__hcorrectordiff__ = \"SR-DMS4AP{LOCALBUMP:HCOR}PS:Delta-SP\"\n self.__vcorrectorcurrent__ = \"SR-DMS4AP{LOCALBUMP:VCOR}PS-SP\"\n self.__vcorrectordiff__ = \"SR-DMS4AP{LOCALBUMP:VCOR}PS:Delta-SP\"\n\n self.__undo__ = \"SR-DMS4AP{LOCALBUMP}Enbl:Undo-Cmd\"\n self.__apply__ = \"SR-DMS4AP{LOCALBUMP}Enbl-Cmd\"\n self.__status__ = \"SR-DMS4AP{LOCALBUMP}TS-I\"\n self.__idposinfo__ = \"SR-DMS4AP{LOCALBUMP}S-I\"\n self.__srcposition__ = \"SR-DMS4AP{LOCALBUMP}SRC-SP\"",
"def export_db_macserial(db, path, year):\n\n with open(path, 'w') as fh:\n print('#ifndef GENSERIAL_MODELINFO_AUTOGEN_H', file=fh)\n print('#define GENSERIAL_MODELINFO_AUTOGEN_H\\n', file=fh)\n print('// DO NOT EDIT! This is an autogenerated file.\\n', file=fh)\n print('#include \"macserial.h\"\\n', file=fh)\n\n print('typedef enum {', file=fh)\n\n for info in db:\n print(' {}, // {}'.format(\n info['SystemProductName'].replace(',', '_'),\n info['Specifications']['CPU'][0]\n ), file=fh)\n\n print('} AppleModel;\\n', file=fh)\n print('#define APPLE_MODEL_MAX {}\\n'.format(len(db)), file=fh)\n\n print('static PLATFORMDATA ApplePlatformData[] = {', file=fh)\n for info in db:\n print(' {{ \"{}\", \"{}\" }},'.format(\n info['SystemProductName'],\n info['SystemSerialNumber']\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_MODEL_CODE_MAX {}'.format(max(len(info['AppleModelCode']) for info in db)), file=fh)\n print('static const char *AppleModelCode[][APPLE_MODEL_CODE_MAX] = {', file=fh)\n\n for info in db:\n print(' /* {:14} */ {{\"{}\"}},'.format(\n info['SystemProductName'],\n '\", \"'.join(info['AppleModelCode'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_BOARD_CODE_MAX {}'.format(max(len(info['AppleBoardCode']) for info in db)), file=fh)\n print('static const char *AppleBoardCode[][APPLE_BOARD_CODE_MAX] = {', file=fh)\n\n for info in db:\n print(' /* {:14} */ {{\"{}\"}},'.format(\n info['SystemProductName'],\n '\", \"'.join(info['AppleBoardCode'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_MODEL_YEAR_MAX {}'.format(max(len(info['AppleModelYear']) for info in db)), file=fh)\n print('static uint32_t AppleModelYear[][APPLE_MODEL_YEAR_MAX] = {', file=fh)\n for info in db:\n print(' /* {:14} */ {{{}}},'.format(\n info['SystemProductName'],\n ', '.join(str(year) for year in info['AppleModelYear'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('static uint32_t ApplePreferredModelYear[] = {', file=fh)\n for info in db:\n print(' /* {:14} */ {},'.format(\n info['SystemProductName'],\n info.get('MacserialModelYear', 0)\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#endif // GENSERIAL_MODELINFO_AUTOGEN_H', file=fh)",
"def xephemFormat(self):\n line = []\n #Field 1: names\n names = [self.getName()]\n identifiers = self.getIdentifiers()\n if identifiers[0] is not None:\n names.append(identifiers[0])\n for i in range(1,4):\n if identifiers[i] is not None:\n names.extend(identifiers[i])\n line.append(\"|\".join(names))\n\n #Field 2: type designation\n objType = self.getType()\n if objType in (\"Galaxy Pair\", \"Galaxy Triplet\", \"Group of galaxies\"):\n line.append(\"f|A\")\n elif objType == \"Globular Cluster\":\n line.append(\"f|C\")\n elif objType == \"Double star\":\n line.append(\"f|D\")\n elif objType in (\"HII Ionized region\", \"Nebula\"):\n line.append(\"f|F\")\n elif objType == \"Galaxy\":\n if self.getHubble().startswith(\"S\"):\n line.append(\"f|G\")\n else:\n line.append(\"f|H\")\n elif objType == \"Dark Nebula\":\n line.append(\"f|K\")\n elif objType in (\"Emission Nebula\", \"Reflection Nebula\"):\n line.append(\"f|N\")\n elif objType in (\"Association of stars\", \"Open Cluster\"):\n line.append(\"f|O\")\n elif objType == \"Planetary Nebula\":\n line.append(\"f|P\")\n elif objType == \"Supernova remnant\":\n line.append(\"f|R\")\n elif objType == \"Star\":\n line.append(\"f|S\")\n elif objType == \"Star cluster + Nebula\":\n line.append(\"f|U\")\n else:\n line.append(\"f\")\n\n #Field 3: Right Ascension\n line.append(self.getRA())\n\n #Field 4: Declination\n line.append(self.getDec())\n\n #Field 5: Magnitude\n #We use the first available magnitude in the sequence b,v,j,h,k\n for mag in self.getMagnitudes():\n if mag is not None:\n line.append(str(mag))\n break\n\n #Field 6: optional Epoch, we let it empty\n line.append(\"\")\n\n #Field 7: Dimensions\n dimensions = []\n #Xephem format wants axes espressed in arcsec, we have arcmin\n for value in (self.getDimensions()[0],self.getDimensions()[1]):\n if value is not None:\n dimensions.append(str(value*60))\n else:\n dimensions.append(\"\")\n if self.getDimensions()[2] is not None:\n dimensions.append(str(value))\n else:\n dimensions.append(\"\")\n line.append(\"|\".join(dimensions))\n\n return \",\".join(line)",
"def mac_ntoa(mac):\n return '%.2x:%.2x:%.2x:%.2x:%.2x:%.2x' % tuple(map(ord, list(mac)))",
"def american_date_to_iso(connection):\n _update_date_by_regexp(connection=connection,\n regexp=\"^[0-9]{2}/[0-9]{2}/[0-9]{4}$\",\n new_value=\"\"\"CONCAT_WS('-',\n SUBSTR(cav.attribute_value, 7, 4),\n SUBSTR(cav.attribute_value, 1, 2),\n SUBSTR(cav.attribute_value, 4, 2))\n \"\"\")",
"def to_ole_auto(self):\n try:\n dt_obj = duparser.parse(timestamp)\n self.out_ole_auto = \"{0:.12f}\".format((dt_obj - self.epoch_1899).total_seconds() / 86400)\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_ole_auto = False\n return self.out_ole_auto",
"def to_ole_auto(self):\n ts_type = self.ts_types['ole_auto']\n try:\n dt_obj = duparser.parse(self.timestamp)\n if hasattr(dt_obj.tzinfo, '_offset'):\n dt_tz = dt_obj.tzinfo._offset.total_seconds()\n dt_obj = duparser.parse(self.timestamp, ignoretz=True)\n else:\n dt_tz = 0\n self.out_ole_auto = \"{0:.12f}\".format(((dt_obj - self.epoch_1899).total_seconds() - int(dt_tz)) / 86400)\n ts_output = str(\"{}\\t\\t{}\".format(ts_type, self.out_ole_auto))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_ole_auto = ts_output = False\n return self.out_ole_auto, ts_output",
"def operator_aircraft_info(self, apath):\r\n opfolder_path = apath.split(\"0 NEW\")[-1]\r\n opfolder = opfolder_path.replace(\"/\", \"\")\r\n opfolder = opfolder.replace(\"\\\\\", \"\")\r\n opfolder = opfolder.split(\" \")\r\n operator = opfolder[0].strip()\r\n aircraft = opfolder[1].strip()\r\n return operator, aircraft",
"def octa_cox_data_to_ss(data):\n t = pandas.Series((\n data['TIME_StartTime'] -\n data['TIME_StartTime'].values[0]) / 1.0e6, name='t, sec')\n xh = pandas.DataFrame(\n data[[\n 'LPOS_X', 'LPOS_Y', 'LPOS_Z',\n 'LPOS_VX', 'LPOS_VY', 'LPOS_VZ',\n 'ATT_Roll', 'ATT_Pitch', 'ATT_Yaw',\n 'ATT_RollRate', 'ATT_PitchRate', 'ATT_YawRate']].values,\n columns=[\n 'X', 'Y', 'Z', 'V_X', 'V_Y', 'V_Z',\n 'Phi', 'Theta', 'Psi',\n 'P', 'Q', 'R'], index=t)\n y = pandas.DataFrame(\n data[[\n 'GPS_Lat', 'GPS_Lon', 'GPS_Alt',\n 'SENS_BaroAlt',\n 'IMU1_AccX', 'IMU1_AccY', 'IMU1_AccZ',\n 'IMU1_GyroX', 'IMU1_GyroY', 'IMU1_GyroZ',\n 'IMU1_MagX', 'IMU1_MagY', 'IMU1_MagZ']].values,\n columns=[\n 'GPS_Lat', 'GPS_Lon', 'GPS_Alt',\n 'Baro_Alt',\n 'Acc_X', 'Acc_Y', 'Acc_Z',\n 'Gyro_X', 'Gyro_Y', 'Gyro_Z',\n 'Mag_X', 'Mag_Y', 'Mag_Z'], index=t)\n u_raw = pandas.DataFrame(\n ((data[[\n 'OUT0_Out0', 'OUT0_Out1', 'OUT0_Out2',\n 'OUT0_Out3', 'OUT0_Out4', 'OUT0_Out5', 'OUT0_Out6',\n 'OUT0_Out7']] - 1000.0) / 1000.0).values,\n columns=['1', '2', '3', '4', '5', '6', '7', '8'], index=t)\n c_mix_octo = np.array([\n [1, 1, 1, 1, 1, 1, 1, 1], # thrust\n [-1, 1, 1, -1, -1, 1, 1, -1], # roll\n [-1, -1, 1, 1, -1, -1, 1, 1], # pitch\n [1, -1, 1, -1, 1, -1, 1, -1], # yaw\n ]) / 8.0\n u = pandas.DataFrame(\n c_mix_octo.dot(u_raw.T).T,\n columns=['thrust', 'roll', 'pitch', 'yaw'],\n index=t)\n return t, xh, u, y, u_raw",
"def read_aircraft_nav_into_awot(\n AmprTB, project='OLYMPEX', platform='NASA ER-2', flight_number=None):\n\n if not hasattr(AmprTB, 'Aircraft_Nav'):\n print('No aircraft information in argument, failing ...')\n return\n\n flight = {}\n varlist = ['latitude', 'longitude', 'altitude', 'time']\n for var in varlist:\n flight[var] = {}\n flight['latitude']['data'] = AmprTB.Aircraft_Nav['GPS Latitude']\n flight['longitude']['data'] = AmprTB.Aircraft_Nav['GPS Longitude']\n flight['altitude']['data'] = AmprTB.Aircraft_Nav['GPS Altitude']\n\n ampr_datetime = []\n for et in AmprTB.Epoch_Time:\n ampr_datetime.append(dt.datetime(1970, 1, 1) +\n dt.timedelta(seconds=np.float(et)))\n flight['time']['data'] = ampr_datetime\n\n for var in varlist:\n flight[var]['data'] = np.ma.masked_array(\n flight[var]['data'], mask=False)\n flight['flight_number'] = flight_number\n flight['project'] = project\n flight['platform'] = platform\n flight['Uwind'] = None\n flight['Vwind'] = None\n return flight",
"def vac2air(w):\n return w / (1.0 + 2.735182E-4 + 131.4182 / w ** 2 + 2.76249E8 / w ** 4)",
"def to_ole_be(self):\n try:\n dt_obj = duparser.parse(timestamp)\n delta = (dt_obj - self.epoch_1899).total_seconds() / 86400\n conv = struct.unpack('<Q', struct.pack('<d', delta))[0]\n self.out_ole_be = str(hexlify(struct.pack('>Q', conv))).strip(\"b'\").strip(\"'\")\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_ole_be = False\n return self.out_ole_be",
"def to_amdl(self):\n from .adipls import ADIPLSStellarModel\n\n ioff = (0 if self.r[0] < 1e6 else 1) # mimic ADIPLS's FGONG to AMDL script\n A = np.zeros((len(self.data) + ioff, 6))\n\n # we can safely ignore division by 0 here\n with np.errstate(divide='ignore', invalid='ignore'):\n A[ioff:,0] = self.x\n A[ioff:,1] = self.q/self.x**3\n A[ioff:,2] = self.Vg\n A[ioff:,3] = self.Gamma_1\n A[ioff:,4] = self.AA\n A[ioff:,5] = self.U\n\n A[0,0] = 0.\n A[0,1] = 4.*np.pi/3.*self.rho[0]*self.R**3/self.M\n A[0,2] = 0.\n A[0,3] = self.Gamma_1[0]\n A[0,4] = 0.\n A[0,5] = 3.\n\n D = np.zeros(8)\n D[0] = self.M\n D[1] = self.R\n D[2] = self.P[0]\n D[3] = self.rho[0]\n D[4] = 4.*np.pi/3.*self.G*(self.rho[0]*self.R)**2/(self.P[0]*self.Gamma_1[0])\n D[5] = D[4]\n D[6] = -1.0\n D[7] = 0.0\n\n return ADIPLSStellarModel(D, A, G=self.G)",
"def data_airline():\n return load_airline()",
"def to_ir(self):",
"def format_data(self, raw_data):\n opz = raw_data.copy()\n opz['datetime'] = pd.to_datetime(opz['Datum-tijd'], format='%Y-%m-%dT%H:%M:%SZ')\n opz.drop(['Datum-tijd'],axis=1, inplace=True)\n opz['dag']=opz['datetime'].dt.day\n opz['tijd'] = opz['datetime'].dt.time\n #voeg open/dicht data toe en bepaal momenten waarop dit wisselt\n opz['Opzetstuk Noord (°)'] = opz['Opzetstuk Noord (°)'].str.replace(',', '.').astype(float)\n opz['Opzetstuk Zuid (°)'] = opz['Opzetstuk Zuid (°)'].str.replace(',', '.').astype(float)\n opz['Opzetstuk Noord (°)'].fillna(opz['Opzetstuk Zuid (°)'], inplace=True)\n opz['Opzetstuk Zuid (°)'].fillna(opz['Opzetstuk Noord (°)'], inplace=True)\n return opz",
"def indoor_air_quality(self):\n # name, command, signals, delay\n return self._i2c_read_words_from_cmd(command=[0x20, 0x08], reply_size=2, delay=0.05)",
"def to_abivars(self):",
"def to_ACEScg():\r\n selNodes = nuke.selectedNodes()\r\n for node in selNodes:\r\n if node.Class() == 'Read':\r\n inputDataType = {\r\n '8-bit fixed': 169, '16-bit fixed': 169,\r\n '16-bit half float': 163, '32-bit float': 163\r\n }\r\n bitDepth = node.metadata('input/bitsperchannel')\r\n node['colorspace'].setValue(inputDataType[bitDepth])\r\n fileParm = node['file'].value()\r\n fileName = str(fileParm.split('/')[-1])\r\n newName = str(fileName.split('.')[0] + '_ACEScg')\r\n fileName = fileName.replace(str(fileName.split('.')[0]), newName)\r\n filename, fileExt = os.path.splitext(fileName)\r\n newFileName = filename + '.exr'\r\n newPath = fileParm.replace(\r\n str(fileParm.split('/')[-1]), newFileName)\r\n\r\n # Create write node and save out as ACEScg\r\n wNode = nuke.nodes.Write()\r\n wNode.setInput(0, node)\r\n wNode['file'].setValue(newPath)\r\n wNode['file_type'].setValue(3)\r\n wNode['colorspace'].setValue(16)\r\n nuke.execute(wNode, start=1, end=1, incr=1)",
"def __make_geo(self):\n # gmsh freecad_part.iges -o out_iges.geo -0\n fname_list = self.__fname.split('.')\n geo_file = fname_list[0]+'.geo'\n runstr = \"%s %s -o %s -0\" % (environment.GMSH, self.__fname, geo_file)\n print(runstr)\n subprocess.call(runstr, shell=True)\n print('Wrote file: %s' % geo_file)",
"def atm_print():\n\n # Initialize file\n metric_filename = \"stdatmos_si.txt\"\n with open(metric_filename, 'w') as output_handle:\n\n # Create header\n output_handle.write(\"Geometric Geopotential Speed of\\n\")\n output_handle.write(\"Altitude Altitude Temperature Pressure Density Sound \\n\")\n output_handle.write(\" (m) (m) (K) (N/m**2) (kg/m**3) (m/s) \\n\")\n output_handle.write(\"-----------------------------------------------------------------------\\n\")\n\n # Loop through altitudes\n for i in range(51):\n\n # Calculate properties\n h = i*2000.0\n z, t, p, d = statsi(h)\n a = np.sqrt(1.4*287.0528*t)\n\n # Write to file\n write_string = \"{0:<10}{1:<13.5f}{2:<13.5f}{3:<14.5e}{4:<13.5e}{5:<8.4f}\\n\".format(h, z, t, p, d, a)\n output_handle.write(write_string)\n\n # Initialize file\n english_filename = \"stdatmos_ee.txt\"\n with open(english_filename, 'w') as output_handle:\n\n # Create header\n output_handle.write(\"Geometric Geopotential Speed of\\n\")\n output_handle.write(\"Altitude Altitude Temperature Pressure Density Sound \\n\")\n output_handle.write(\" (ft) (ft) (R) (lbf/ft^2) (slugs/ft^3) (ft/s) \\n\")\n output_handle.write(\"------------------------------------------------------------------------\\n\")\n\n # Loop through altitudes\n for i in range(51):\n\n # Calculate properties\n h = i*5000.0\n z, t, p, d = statee(h)\n a = np.sqrt(1.4*287.0528*t/1.8)/0.3048\n\n # Write to file\n write_string = \"{0:<10}{1:<13.5f}{2:<13.5f}{3:<14.5e}{4:<13.5e}{5:<8.4f}\\n\".format(h, z, t, p, d, a)\n output_handle.write(write_string)",
"def uacalc_format(self, name):\n st = '<?xml version=\"1.0\"?>\\n<algebra>\\n <basicAlgebra>\\n <algName>'+\\\n name+(str(self.index) if self.index!=None else '')+\\\n '</algName>\\n <cardinality>'+str(self.cardinality)+\\\n '</cardinality>\\n <operations>\\n'\n for x in self.operations:\n st += ' <op>\\n <opSymbol>\\n <opName>'+\\\n x+'</opName>\\n'\n oplst = type(self.operations[x]) == list\n if oplst and type(self.operations[x][0]) == list:\n st += ' <arity>2</arity>\\n </opSymbol>\\n <opTable>\\n <intArray>\\n' + xmlopstr(self.operations[x])\n else:\n st += ' <arity>'+('1' if oplst else '0')+'</arity>\\n </opSymbol>\\n <opTable>\\n <intArray>\\n <row>' + (str(self.operations[x])[1:-1] if oplst else str(self.operations[x]))+'</row>\\n'\n st += ' </intArray>\\n </opTable>\\n </op>\\n'\n return st+' </operations>\\n </basicAlgebra>\\n</algebra>\\n'",
"def OPCtimetransformOld(data, to):\n outtimes = []\n \n times = {\n 'ms':[],\n 'SS':[],\n 'MM':[],\n 'HH':[]\n }\n for i in range(0, len(data)):\n item = data[i]\n try: \n times['HH'].append(int(item[0:2]))\n times['MM'].append(int(item[2:4]))\n times['SS'].append(int(item[4:6]))\n times['ms'].append(int(item[7:9]))\n except ValueError:\n # strange value 2319010.00 in 201129 file...\n olditem = item\n newitem = item[:4] + item[4+1:]\n print( ('Repairing strange value %s into %s')%(olditem, newitem) )\n try:\n times['HH'].append(int(newitem[0:2]))\n times['MM'].append(int(newitem[2:4]))\n times['SS'].append(int(newitem[4:6]))\n times['ms'].append(int(newitem[7:9]))\n except ValueError:\n print(newitem)\n\n # OPC times go up to 60 minutes. This is corrected by moving one minute\n times['MM'] = [max(0,x-1) for x in times['MM']]\n times['SS'] = [max(0,x-1) for x in times['SS']]\n\n for i in range(0, len(data)):\n md = dt.datetime(1900,1,1,times['HH'][i], times['MM'][i], times['SS'][i]) \n outtimes.append( dt.datetime.strftime(md, to))\n\n return outtimes",
"def align(): # open EH and fast shutter\n\t#marAuxiliary.closeMarShield()\n\td2in()\n\td3in()\n\tsh('o')",
"def iso_date_to_american(connection):\n _update_date_by_regexp(connection=connection,\n regexp=\"^[0-9]{4}-[0-9]{2}-[0-9]{2}$\",\n new_value=\"\"\"CONCAT_WS('/',\n SUBSTR(cav.attribute_value, 6, 2),\n SUBSTR(cav.attribute_value, 9, 2),\n SUBSTR(cav.attribute_value, 1, 4))\n \"\"\")",
"def Uiso(data, logfile):\n printer.register_file(logfile, 'log', mode='a')\n # isofilters, isopartnerfilters = set_filter()\n #===========================================================================\n # keys=['cart_int','cart_ext','cart_sum','cart_meas','iso_meas']\n #===========================================================================\n use1, use2 = None, None\n try:\n use = config.arg('iso').split(':')\n use1 = use[0]\n use2 = use[1]\n\n except:\n use1 = 'cart_sum'\n use2 = 'cart_sum'\n # printer('\\nSelected filters:\\n\\n Attribute | Value | True | Function'\n # '\\n===================================================\\nAtom filter: | | |')\n # for isofilter in isofilters:\n # printer('{:15s} | {:12s} | {:5s} | {}'.format(isofilter[0], isofilter[1], isofilter[2], isofilter[3]))\n # printer('-----------------|--------------|-------|----------\\nPartner filter: | | |')\n # for isofilter in isopartnerfilters:\n # printer('{:15s} | {:12s} | {:5s} | {}'.format(isofilter[0], isofilter[1], isofilter[2], isofilter[3]))\n printer('\\nComparing {} of hydrogen atoms\\nwith {} of parent atoms.\\n'.format(use1, use2))\n printer(' Riding | Parent | U_rel | U_rel\\n atom | atom | geom | arith')\n printer(' ================================')\n geom_sum = []\n arit_sum = []\n for atom in data['exp'].atoms:\n # if apply_filter(atom, isofilters):\n for heavy_atom in cg.get_framework_neighbours(atom, useH=True):\n if not atom == heavy_atom and filter_atom_pair(config, atom, heavy_atom):\n U_rel_geom = cg.Uiso(atom.adp[use1]) / cg.Uiso(heavy_atom.adp[use2])\n geom_sum.append(U_rel_geom)\n U_rel_arith = cg.Uiso(atom.adp[use1], mean='arithmetic') / \\\n cg.Uiso(heavy_atom.adp[use2], mean='arithmetic')\n arit_sum.append(U_rel_arith)\n printer(' {light:5s} | {heavy:5s} | {U:5.3f} | {U2:5.3f}'.format(light=atom.name,\n heavy=heavy_atom,\n U=U_rel_geom,\n U2=U_rel_arith))\n printer(' -------|--------|-------|-------')\n printer(' {light:5s} | {heavy:5s} | {U:5.3f} | {U2:5.3f}'.format(light='mean',\n heavy='---',\n U=np.mean(geom_sum),\n U2=np.mean(arit_sum)))\n printer(' {light:5s} | {heavy:5s} | {U:5.3f} | {U2:5.3f}'.format(light='SD',\n heavy='---',\n U=np.std(geom_sum),\n U2=np.std(arit_sum)))\n\n printer('{temp:.1f} {U:5.3f} {Uer:5.3f} {U2:5.3f} {U2er:5.3f}'.format(temp=data.temperature,\n U=np.mean(geom_sum),\n Uer=np.std(geom_sum),\n U2=np.mean(arit_sum),\n U2er=np.std(arit_sum)),\n use=['log'])\n printer.spacer()",
"def ROCKSTAR_ASCII():\n dt= [\n ('haloid' , np.int64), #id\n ('n_particles' , np.int64), #num_p\n ('mhalo' , np.float32), #mvir \n ('mbasic' , np.float32), #mbound_vir \n ('rvir' , np.float32), #rvir\n ('vmax' , np.float32), #vmax\n ('rvmax' , np.float32), #rvmax\n ('vrms' , np.float32), #vrms \n ('x_pos' , np.float32), #x\n ('y_pos' , np.float32), #y\n ('z_pos' , np.float32), #z\n ('x_vel' , np.float32), #vx\n ('y_vel' , np.float32), #vy\n ('z_vel' , np.float32), #vz\n ('x_ang' , np.float32), #Jx\n ('y_ang' , np.float32), #Jy\n ('z_ang' , np.float32), #Jz\n ('engery' , np.float32), #E\n ('spinParameter' , np.float32), #Spin\n ('unc_pos' , np.float32), #PosUncertainty\n ('unc_vel' , np.float32), #VelUncertainty\n ('x_vel_bulk' , np.float32), #bulx_vx\n ('y_vel_bulk' , np.float32), #bulx_vy\n ('z_vel_bulk' , np.float32), #bulx_vz\n ('unc_vel_bulk' , np.float32), #BulkVelUnc\n ('n_core' , np.int64), #n_core\n ('mhalo_200b' , np.float32), #m200b \n ('mhalo_200c' , np.float32), #m200c\n ('mhalo_500c' , np.float32), #m500c\n ('mhalo_2500c' , np.float32), #m2500c\n ('x_off' , np.float32), #Xoff\n ('v_off' , np.float32), #Yoff\n ('spin_Bullock' , np.float32), #spin_bullock\n ('b_to_a' , np.float32), #b_to_a \n ('c_to_a' , np.float32), #c_to_a\n ('x_a' , np.float32), #A[x]\n ('y_a' , np.float32), #A[y]\n ('z_a' , np.float32), #A[z] \n ('b_to_a_500c' , np.float32), #b_to_a(500c)\n ('c_to_a_500c' , np.float32), #c_to_a(500c) \n ('x_a_500c' , np.float32), #A[x](500c) \n ('y_a_500c' , np.float32), #A[y](500c) \n ('z_a_500c' , np.float32), #A[z](500c)\n ('rscale' , np.float32), #rs \n ('rscale_Klypin' , np.float32), #rs_Klypin\n ('T_U' , np.float32), #T/|U|\n ('Mpseudo_Behroozi', np.float32), #M_pe_Behroozi\n ('Mpseudo_Diemer' , np.float32), #M_pe_Diemer\n ('rhalf_mass' , np.float32), #Halfmass_Radius\n ('idx' , np.int64), #internal debugging quantity\n ('i_so' , np.int64), #internal debugging quantity\n ('i_ph' , np.int64), #internal debugging quantity\n ('n_particle_child', np.int64), #internal debugging quantity\n ('mmetric' , np.float32) #internal debugging quantity \n ] \n return dt",
"def translateORFtoAAs(self,sequence,number):\r\n AAStringfromORF = str()\r\n startingM = int()\r\n for i in range(0,len(sequence)-2,3):\r\n if sequence[i:i+3] != \"AUG\":\r\n pass\r\n else:\r\n startingM = i\r\n for i in range(startingM,len(sequence)-2,3):\r\n x = self.tabletoTranslate(sequence[i:i+3])\r\n AAStringfromORF+=x\r\n if x == \"-\":\r\n self.listofSequences.append(AAStringfromORF.rstrip(\"-\").lstrip().rstrip())\r\n AAStringfromORF = str()\r\n break",
"def produce_output_txt(self):\n\n NAME = \"TODO get name form cpacs object\"\n\n result_dir = get_results_directory(\"WeightConventional\")\n\n output_file = Path(result_dir, \"Aircraft_Geometry.out\")\n\n OutputTextFile = open(output_file, \"w\")\n\n OutputTextFile.write(\"\\n#################################################\")\n OutputTextFile.write(\"\\n###### AIRCRAFT GEOMETRY EVALUATION MODULE ######\")\n OutputTextFile.write(\"\\n###### OUTPUTS ######\")\n OutputTextFile.write(\"\\n#################################################\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nAircraft: \" + NAME)\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nGeometry Evaluations-----------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nUSEFUL INFO -------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\n \"\\nIf fuselage or wing number is greater than 1 the\\n\"\n \"information of each obj are listed in an \"\n \"array ordered\\nprogressively\"\n )\n OutputTextFile.write(\n \"\\nSymmetry output: 0 = no symmetry, 1 = x-y,\\n\" + \"2 = x-z, 3 = y-z planes\"\n )\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nRESULTS -----------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nFUSELAGE ----------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(f\"\\nNumber of fuselage sections [-]: {self.fuse_sec_nb}\")\n OutputTextFile.write(f\"\\nNumber of fuselage segments [-]: {self.fuse_seg_nb}\")\n OutputTextFile.write(f\"\\nCabin segments array [-]: {self.cabin_seg}\")\n OutputTextFile.write(f\"\\nFuse Length [m]: {np.around(self.fuse_length, 5)}\")\n OutputTextFile.write(f\"\\nFuse nose Length [m]: {np.around(self.fuse_nose_length, 5)}\")\n OutputTextFile.write(f\"\\nFuse cabin Length [m]: {np.around(self.fuse_cabin_length, 5)}\")\n OutputTextFile.write(f\"\\nFuse tail Length [m]: {np.around(self.fuse_tail_length, 5)}\")\n OutputTextFile.write(f\"\\nAircraft Length [m]: {np.around(self.tot_length, 5)}\")\n OutputTextFile.write(\n \"\\nCircumference of each section of the fuselage [m]:\"\n f\"\\n{np.around(self.fuse_sec_circ, 5)}\"\n )\n OutputTextFile.write(\n \"\\nRelative distance of each section of the\"\n + \"fuselage, respect to the first one [m]: \\n\"\n + str(np.around(self.fuse_sec_rel_dist, 5))\n )\n OutputTextFile.write(\n \"\\nLength of each segment of the fuselage [m]: \\n\"\n + str(np.around(self.fuse_seg_length, 5))\n )\n OutputTextFile.write(\n \"\\nMean fuselage width [m]: \" + str(np.around(self.fuse_mean_width, 5))\n )\n OutputTextFile.write(\n \"\\nWidth of each section of the fuselage [m]: \\n\"\n + str(np.around(self.fuse_sec_width, 5))\n )\n OutputTextFile.write(\n \"\\nVolume of each segment of the fuselage \"\n \"[m^3]: \\n\" + str(np.around(self.fuse_seg_vol, 5))\n )\n OutputTextFile.write(\n \"\\nVolume of the cabin [m^3]: \" + str(np.around(self.fuse_cabin_vol, 5))\n )\n OutputTextFile.write(\"\\nVolume of the fuselage [m^3]: \" + str(np.around(self.fuse_vol, 5)))\n 
OutputTextFile.write(\"\\n\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(\"\\nWINGS -------------------------------------------\")\n OutputTextFile.write(\"\\n-------------------------------------------------\")\n OutputTextFile.write(f\"\\nNumber of Wings [-]: {self.wing_nb}\")\n OutputTextFile.write(f\"\\nWing symmetry plane [-]: {self.wing_sym}\")\n OutputTextFile.write(f\"\\nNumber of wing sections [-]: {self.wing_sec_nb}\")\n OutputTextFile.write(f\"\\nNumber of wing segments [-]: {self.wing_seg_nb}\")\n OutputTextFile.write(f\"\\nWing Span [m]: \\n{np.around(self.wing_span, 5)}\")\n OutputTextFile.write(\n \"\\nWing MAC length [m]: \\n\"\n + str(\n np.around(\n self.wing_mac[\n 0,\n ],\n 5,\n )\n )\n )\n OutputTextFile.write(\n \"\\nWing MAC x,y,z coordinate [m]: \\n\"\n + str(\n np.around(\n self.wing_mac[\n 1:4,\n ],\n 5,\n )\n )\n )\n OutputTextFile.write(\n \"\\nWings sections thickness [m]: \\n\" + str(np.around(self.wing_sec_thickness, 5))\n )\n OutputTextFile.write(\n \"\\nWings sections mean thickness [m]: \\n\" + str(np.around(self.wing_sec_mean_thick, 5))\n )\n OutputTextFile.write(\n \"\\nWing segments length [m]: \\n\" + str(np.around(self.wing_seg_length, 5))\n )\n OutputTextFile.write(\n \"\\nWing max chord length [m]: \\n\" + str(np.around(self.wing_max_chord, 5))\n )\n OutputTextFile.write(\n \"\\nWing min chord length [m]: \\n\" + str(np.around(self.wing_min_chord, 5))\n )\n OutputTextFile.write(\n \"\\nWings planform area [m^2]: \\n\" + str(np.around(self.wing_plt_area, 5))\n )\n OutputTextFile.write(\n \"\\nMain wing planform area [m^2]: \" + str(np.around(self.wing_plt_area_main, 5))\n )\n OutputTextFile.write(\"\\nVolume of each wing [m^3]: \\n\" + str(np.around(self.wing_vol, 5)))\n OutputTextFile.write(\"\\nTotal wing volume [m^3]: \" + str(np.around(self.wing_tot_vol, 5)))\n OutputTextFile.write(\"\\nWing volume for fuel storage [m^3]: \" + str(self.wing_fuel_vol))\n\n # Close Text File\n OutputTextFile.close()",
"def to_ole_be(self):\n ts_type = self.ts_types['ole_be']\n try:\n dt_obj = duparser.parse(self.timestamp)\n if hasattr(dt_obj.tzinfo, '_offset'):\n dt_tz = dt_obj.tzinfo._offset.total_seconds()\n dt_obj = duparser.parse(self.timestamp, ignoretz=True)\n else:\n dt_tz = 0\n delta = ((dt_obj - self.epoch_1899).total_seconds() - int(dt_tz)) / 86400\n conv = struct.unpack('<Q', struct.pack('<d', delta))[0]\n self.out_ole_be = str(struct.pack('>Q', conv).hex())\n ts_output = str(\"{}\\t{}\".format(ts_type, self.out_ole_be))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_ole_be = ts_output = False\n return self.out_ole_be, ts_output",
"def output(self):\n to_write = 'X '\n to_write += str(self.def_field['name'])+' '\n to_write += str(self.def_field['pin_number'])+' '\n to_write += str(self.def_field['x'])+' '\n to_write += str(self.def_field['y'])+' '\n to_write += str(self.def_field['length'])+' '\n to_write += self.def_field['direction']+' '\n to_write += str(self.def_field['size_num'])+' '\n to_write += str(self.def_field['size_name'])+' '\n #to_write += str(self.def_field['part'])+' '\n to_write += str(self.def_field['dmg'])+' '\n to_write += str(self.def_field['type'])+' '\n to_write += self.def_field['shape']\n to_write += '\\n'\n return to_write",
"def test_get_isotherm(self):\n pygaps.isotherm_from_isodb('10.1002adfm.201200084.Isotherm3')",
"def standarize_apollo2data(apollo2_data, num_dim, retained_i, retained_r, retained_g, verbose=False,res_nufi_removal=True):\n\n if verbose == True:\n print ' ... standarization of IS and OS'\n apollo2_data = standarization_oftags(apollo2_data)\n apollo2_data = standarization_ofxs(\n apollo2_data,res_nufi_removal, retained_i=retained_i, retained_r=retained_r, retained_g=retained_g)\n # FP-style for the phase-space\n apollo2_data['PS'] = standarization_ofPS(\n apollo2_data['PS'], num_dim, apollo2_data['order_tuple'])\n\n try:\n apollo2_data.pop('order_tuple')\n except KeyError:\n warnings.warn('Check structure of incomming a2 dic')\n\n if verbose == True:\n print ' ... standarization of conc'\n\n apollo2_data['PS']['real']['conc'] = standarization_ofconc(apollo2_data['I'])\n apollo2_data['I'] = elimination_ofconc(apollo2_data['I'])\n try:\n apollo2_data['fi'].pop('iota')\n apollo2_data['k'].pop('b2')\n except KeyError:\n warnings.warn('Check structure of incomming a2 data. dic.keys() = %s' % apollo2_data.keys())\n\n # FP-style for the k/SI\n if verbose == True:\n print ' ... standarization of k,fi'\n for eigen_pair in ['k', 'fi']:\n for eigen in apollo2_data[eigen_pair]:\n if eigen != 'b2': # this is not always present\n if verbose == True:\n print ' ... doing ', eigen_pair, eigen\n tmp = standarization_ofPScodomain(\n apollo2_data[eigen_pair][eigen], apollo2_data['PS'])\n apollo2_data[eigen_pair][eigen] = OrderedDict()\n apollo2_data[eigen_pair][eigen][eigen] = tmp\n if eigen_pair == 'k':\n apollo2_data[eigen_pair][eigen]['ro'] = reactivity_calculation(\n apollo2_data[eigen_pair][eigen][eigen])\n # FP-style for the xs data\n # easier to use auxiliary dic than in-situ transofrmation, but less elegant\n # handeling the interface at this level, thus pure functions can be used\n if verbose == True:\n print ' ... standarization of xs'\n aux_dic = OrderedDict()\n output_space_names = []\n output_space_tup = OrderedDict()\n for x in ['1']: # eventually zones will come in a2 data\n aux_dic[x] = OrderedDict()\n for i in apollo2_data['I']:\n aux_dic[x][i] = OrderedDict()\n for r in apollo2_data['I'][i]['R']:\n aux_dic[x][i][r] = OrderedDict()\n for g in apollo2_data['I'][i]['R'][r]:\n if verbose == True:\n print ' ... doing ', i, r, g\n aux_dic[x][i][r][g] = standarization_ofPScodomain(\n apollo2_data['I'][i]['R'][r][g], apollo2_data['PS'])\n name = '_' + x + '_' + i + '_' + r + '_' + g\n output_space_names.append(name)\n output_space_tup.update({name: XsTupPickable(x, i, r, g)})\n apollo2_data.pop('I')\n apollo2_data['xs'] = aux_dic\n apollo2_data['xs_names'] = output_space_names\n apollo2_data['xs_tuple'] = output_space_tup\n del(aux_dic) # the reference is eliminated\n apollo2_data['XIRG'] = hierachy_nomenclature(apollo2_data)\n return apollo2_data",
"def write_equipment_file(self, model, **kwargs):\n output_file = self.output_path + \"/equipment.txt\"\n\n with open(output_file, \"w\") as f:\n\n # Header\n f.write(\"[GENERAL]\\n\")\n current_date = datetime.now().strftime(\"%B %d, %Y at %H:%M:%S\")\n f.write(\"DATE={}\\n\".format(current_date))\n f.write(\"CYME_VERSION=8.02\\n\")\n f.write(\"\\n[SI]\\n\")\n\n # Substations\n #\n if len(self.substations) > 0:\n f.write(\"\\n[SUBSTATION]\\n\")\n f.write(\n \"FORMAT_SUBSTATION=ID,MVA,KVLL,KVLLdesired,R1,X1,R0,X0,R2,X2,PhaseAngle,MVA_1,MVA_2,MVA_3,MVA_4,Conn,PrimaryEquivalentType,SubEqVal1,SubEqVal2,SubEqVal3,SubEqVal4,SubPrimaryLLVoltage,SecondaryFaultReactance,TxfoConnection,HarmonicEnveloppe,BackgroundHarmonicVoltage,BaseMVA,ImpedanceUnit,BranchID_1,PrimProtDevID_1,PrimProtDevNum_1,TransformerID_1,TransformerNum_1,SubXs_1,SecProtDevID_1,SecProtDevNum_1,BranchStatus_1,BranchID_2,PrimProtDevID_2,PrimProtDevNum_2,TransformerID_2,TransformerNum_2,SubXs_2,SecProtDevID_2,SecProtDevNum_2,BranchStatus_2,BranchID_3,PrimProtDevID_3,PrimProtDevNum_3,TransformerID_3,TransformerNum_3,SubXs_3,SecProtDevID_3,SecProtDevNum_3,BranchStatus_3,BranchID_4,PrimProtDevID_4,PrimProtDevNum_4,TransformerID_4,TransformerNum_4,SubXs_4,SecProtDevID_4,SecProtDevNum_4,BranchStatus_4,BranchID_5,PrimProtDevID_5,PrimProtDevNum_5,TransformerID_5,TransformerNum_5,SubXs_5,SecProtDevID_5,SecProtDevNum_5,BranchStatus_5,FailRate,TmpFailRate,MajorRepairTime,\"\n )\n f.write(\n \"MinorRepairTime,MajorFailureProportion,SymbolID,Favorite,Flags,Comments\\n\"\n )\n\n for sub in self.substations:\n if \"sub_ID\" in sub:\n f.write(sub[\"sub_ID\"] + \",\")\n if \"MVA\" in sub:\n f.write(sub[\"MVA\"] + \",\")\n else:\n f.write(\",\")\n if \"KVLL\" in sub:\n # NOTE: Setting the voltage to 1.05pu at the feeder head is raw coded here\n # TODO: Come up with a less dirty way to have 1.05pu at the substation\n f.write(\n \"{a},{b},\".format(\n a=sub[\"KVLL\"], b=float(sub[\"KVLL\"]) * 1.00\n )\n ) # *1.05))\n else:\n f.write(\",,\")\n #\n # TODO: automatically detect if default or real values should be used for source impedance\n #\n if \"R1\" in sub:\n f.write(sub[\"R1\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"X1\" in sub:\n f.write(sub[\"X1\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"R0\" in sub:\n f.write(sub[\"R0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"X0\" in sub:\n f.write(sub[\"X0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"R2\" in sub:\n f.write(sub[\"R2\"] + \",\")\n elif \"R0\" in sub:\n f.write(sub[\"R0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"X2\" in sub:\n f.write(sub[\"X2\"] + \",\")\n elif \"X0\" in sub:\n f.write(sub[\"X0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"phase_angle\" in sub:\n f.write(sub[\"phase_angle\"] + \",\")\n else:\n f.write(\",\")\n\n f.write(\n \",,,,,,,,,,,,,,,,0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,\"\n )\n f.write(\"\\n\")\n\n # Switches\n #\n # Writing default values for switches\n #\n f.write(\"\\n[SWITCH]\\n\")\n f.write(\n \"FORMAT_SWITCH=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,RemoteControlled,Automated,Comments\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,,,,,,,,0,0,0,0,0,\\n\"\n )\n for ID, data in self.switchcodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Fuses\n #\n # Writing 
default values for fuses\n #\n f.write(\"\\n[FUSE]\\n\")\n f.write(\n \"FORMAT_FUSE=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,InterruptingRating,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,Comments,Manufacturer,Model,TCCRating\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,600.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,0,,,,\\n\"\n )\n for ID, data in self.fusecodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Reclosers\n #\n # Writing default values for reclosers\n #\n f.write(\"\\n[RECLOSER]\\n\")\n f.write(\n \"FORMAT_RECLOSER=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,InterruptingRating,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,SinglePhaseTripping,RemoteControlled,Automated,Comments,RecloserType,ControlType,Model\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,600.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,0,0,0,0,,1,,\\n\"\n )\n for ID, data in self.reclosercodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Breakers\n #\n # Writing default values for breakers\n #\n f.write(\"\\n[BREAKER]\\n\")\n f.write(\n \"FORMAT_BREAKER=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,InterruptingRating,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,SinglePhaseTripping,RemoteControlled,Automated,Comments\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,600.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,0,0,0,0,\\n\"\n )\n for ID, data in self.breakercodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Cables\n #\n f.write(\"\\n[CABLE]\\n\")\n f.write(\n \"FORMAT_CABLE=ID,R1,R0,X1,X0,B1,B0,Amps,CableType,UserDefinedImpedances,Frequency,Temperature\\n\"\n )\n f.write(\n \"DEFAULT,0.040399,0.055400,0.035900,0.018200,0.000000,0.000000,447.000000,0,1,60.000000,25.000000\\n\"\n )\n for ID, data in self.cablecodes.items():\n f.write(str(ID))\n for key in [\"R1\", \"R0\", \"X1\", \"X0\", \"B1\", \"B0\", \"amps\", \"cabletype\"]:\n if key in data:\n f.write(\",\" + str(data[key]))\n else:\n f.write(\",\")\n f.write(\",1,60.0000,25.00000\\n\")\n\n # Lines\n #\n if len(self.linecodes_overhead) > 0:\n f.write(\"\\n[LINE UNBALANCED]\\n\")\n f.write(\n \"FORMAT_LINEUNBALANCED=ID,Ra,Rb,Rc,Xa,Xb,Xc,Ba,Bb,Bc,MutualResistanceAB,MutualResistanceBC,MutualResistanceCA,MutualReactanceAB,MutualReactanceBC,MutualReactanceCA,MutualShuntSusceptanceAB,MutualShuntSusceptanceBC,MutualShuntSusceptanceCA,CondID_A,CondID_B,CondID_C,CondID_N1,CondID_N2,SpacingID,AmpsA,AmpsB,AmpsC,UserDefinedImpedances,Transposed\\n\"\n )\n\n for ID, data in self.linecodes_overhead.items():\n f.write(str(ID))\n for key in [\n \"RA\",\n \"RB\",\n \"RC\",\n \"XA\",\n \"XB\",\n \"XC\",\n \"Ba\",\n \"Bb\",\n \"Bc\",\n \"MutualResistanceAB\",\n \"MutualResistanceBC\",\n \"MutualResistanceCA\",\n \"MutualReactanceAB\",\n \"MutualReactanceBC\",\n \"MutualReactanceCA\",\n \"MutualShuntSusceptanceAB\",\n \"MutualShuntSusceptanceBC\",\n \"MutualShuntSusceptanceCA\",\n \"CondID_A\",\n 
\"CondID_B\",\n \"CondID_C\",\n \"CondID_N1\",\n \"CondID_N2\",\n \"SpacingID\",\n \"AmpsA\",\n \"AmpsB\",\n \"AmpsC\",\n \"UserDefinedImpedances\",\n ]:\n if key in data:\n f.write(\",\" + str(data[key]))\n else:\n if key in [\n \"CondID_A\",\n \"CondID_B\",\n \"CondID_C\",\n \"CondID_N1\",\n \"CondID_N2\",\n \"SpacingID\",\n ]:\n f.write(\"NONE,\")\n else:\n f.write(\",0\")\n f.write(\",0\\n\")\n\n # Conductors\n #\n f.write(\"\\n[CONDUCTOR]\\n\")\n f.write(\"FORMAT_CONDUCTOR=ID,Diameter,GMR,R25,Amps,WithstandRating\\n\")\n f.write(\"DEFAULT,1.000001,1.000001,0.7,2000.000000,2000.000000\\n\")\n if len(self.conductors) > 0:\n for ID, data in self.conductors.items():\n if ID == \"DEFAULT\":\n continue\n f.write(ID)\n f.write(data)\n f.write(\"\\n\")\n\n # Spacing table\n #\n f.write(\"\\n[SPACING TABLE FOR LINE]\\n\")\n f.write(\n \"FORMAT_SPACINGTABLEFORLINE=ID,GMDPh-Ph,GMDPh-N,AvgPhCondHeight,AvgNeutralHeight,PosOfCond1_X,PosOfCond1_Y,PosOfCond2_X,PosOfCond2_Y,PosOfCond3_X,PosOfCond3_Y,PosOfNeutralCond_X,PosOfNeutralCond_Y,PosOfNeutralCond_N2_X,PosOfNeutralCond_N2_Y,BundleDistance,NBPhasesPerCircuit,NBConductorsPerPhase,NBNeutrals,TowerType,DistanceA,DistanceB,DistanceC,DistanceD,DistanceE,ConductorStatusN1,ConductorStatusN2,FootingResistanceN1,FootingResistanceN2,TowerSpanN1,TowerSpanN2,Favorite,Flags,Comments\\n\"\n )\n f.write(\n \"DEFAULT,,,,,-0.609600,10.058400,0.000000,8.839200,0.609600,10.058400,0.000000,11.277600,,,0.010000,3,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n\n f.write(\n \"N_ABOVE_1PH,,,,,0.000000,9.601200,,,,,0.000000,10.363200,,,0.010000,1,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n f.write(\n \"N_ABOVE_2PH,,,,,-1.127760,9.601200,1.127760,9.601200,,,0.000000,10.363200,,,0.010000,2,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n f.write(\n \"N_ABOVE_3PH,,,,,-1.127760,9.601200,0.000000,9.601200,1.127760,9.601200,0.000000,10.363200,,,0.010000,3,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n\n # TODO\n # Add the user-defined spacing tables here\n\n # Capacitors\n #\n if len(self.capcodes) > 0:\n f.write(\"\\n[SHUNT CAPACITOR]\\n\")\n f.write(\n \"FORMAT_SHUNTCAPACITOR=ID,KVAR,KV,CostForFixedBank,CostForSwitchedBank,Type\\n\"\n )\n\n for ID, data in self.capcodes.items():\n f.write(\"capacitor_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\",0,0,0\")\n f.write(\"\\n\")\n\n # Two winding transformers\n #\n if len(self.two_windings_trans_codes) > 0:\n f.write(\"\\n[TRANSFORMER]\\n\")\n f.write(\n \"FORMAT_TRANSFORMER=ID,Type,KVA,VoltageUnit,KVLLprim,KVLLsec,Z1,Z0,XR,XR0,Conn,WindingType,NoLoadLosses,PhaseShift,IsLTC\\n\"\n )\n\n for ID, data in self.two_windings_trans_codes.items():\n f.write(\"transformer_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\"\\n\")\n\n # Three winding transformers\n #\n if len(self.three_windings_trans_codes) > 0:\n f.write(\"\\n[THREE WINDING TRANSFORMER]\\n\")\n f.write(\n 
\"FORMAT_THREEWINDINGTRANSFORMER=ID,PrimaryRatedCapacity,PrimaryVoltage,PrimaryConnection,PrimaryToSecondaryZ1,PrimaryToSecondaryZ0,PrimaryToSecondaryXR1,PrimaryToSecondaryXR0,PrimaryToTertiaryZ1,PrimaryToTertiaryZ0,PrimaryToTertiaryXR1,PrimaryToTertiaryXR0,SecondaryToTertiaryZ1,SecondaryToTertiaryZ0,SecondaryToTertiaryXR1,SecondaryToTertiaryXR0,SecondaryCapacityLimit1,SecondaryCapacityLimit2,TertiaryCapacityLimit1,TertiaryCapacityLimit2,TertiaryConnection,NoLoadLosses\\n\"\n )\n for ID, data in self.three_windings_trans_codes.items():\n f.write(\"3_wdg_transformer_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\"\\n\")\n\n # Regulators\n #\n if len(self.reg_codes) > 0:\n f.write(\"\\n[REGULATOR]\\n\")\n f.write(\n \"FORMAT_REGULATOR=ID,KVA,Bandwidth,CT,PT,Type,KVLN,MaxBuck,MaxBoost,Taps,Reversible\\n\"\n )\n\n for ID, data in self.reg_codes.items():\n f.write(\"regulator_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\"\\n\")\n\n if len(self.irradiance_profiles) > 0:\n f.write(\"\\n[INSOLATION MODEL] \\n\")\n f.write(\"FORMAT_INSOLATIONMODEL=ID,FromFile,FileName\\n\")\n for i in self.irradiance_profiles:\n f.write(\n \"{label},1,{loc}\".format(\n label=i, loc=self.irradiance_profiles[i]\n )\n )\n f.write(\"\\n\")\n\n if len(self.bess_codes) > 0:\n f.write(\"\\n[BESS] \\n\")\n f.write(\n \"FORMAT_BESS=ID,RatedStorageEnergy,MaxChargingPower,MaxDischargingPower,ChargeEfficiency,DischargeEfficiency\\n\"\n )\n for value in self.bess_codes:\n f.write(self.bess_codes[value] + \",\" + value + \"\\n\")\n f.write(\"\\n\")",
"def _write_orca_input(\n self, input_file_name: str, atom_types: np.array, positions: np.array\n ):\n input_file = open(input_file_name, \"w\")\n geometry_block = []\n for idx in range(len(atom_types)):\n geometry_block.append(\n \"{:2s} {:15.8f} {:15.8f} {:15.8f}\".format(\n chemical_symbols[atom_types[idx]], *positions[idx]\n )\n )\n input_file.write(self.orca_template.format(geometry=\"\\n\".join(geometry_block)))\n input_file.close()",
"def main():\n parser = argparse.ArgumentParser(description=\"Align ORB-SLAM results with ground truth according to camera orientation in AirSim.\")\n parser.add_argument(\"filename\", help = \"Trajectory in TUM format.\")\n parser.add_argument(\"output\", help = \"Output file.\")\n \n parser.add_argument(\"roll\", help=\"Camera Roll.\")\n parser.add_argument(\"pitch\", help=\"Camera Pitch.\")\n parser.add_argument(\"yaw\", help=\"Camera Yaw.\")\n\n args = parser.parse_args()\n\n roll = float(args.roll)*m.pi/180\n pitch = float(args.pitch)*m.pi/180\n yaw = float(args.yaw)*m.pi/180\n\n file = open(args.filename, \"r\")\n newFile = open(args.output, \"w\")\n \n for line in file:\n values = line.split()\n x = float(values[3])\n y = float(values[1])\n z = float(values[2])\n position = np.array([[x],[y],[z]])\n position = Rx(roll) @ Ry(pitch) @ Rz(yaw) @ position\n\n newFile.write(\"%s %s %s %s %s %s %s %s\\n\" %(values[0], position[0,0], position[1,0], position[2,0], values[4], values[5], values[6], values[7]))\n\n file.close\n newFile.close\n print(\"Saved as \" + args.output)\n\n return",
"def _raw_to_string(self, dtype, units='Angstrom', atom_format=None, ghost_format=None, width=17, prec=12):\n\n #molrec = self.to_dict(force_units=units, np_out=True)\n molrec = self.to_dict(np_out=True)\n smol = molparse.to_string(molrec,\n dtype=dtype,\n units=units,\n atom_format=atom_format,\n ghost_format=ghost_format,\n width=width,\n prec=prec)\n return smol",
"def _from_physical_space(self, a_n, lobatto, use_mp, dps):\n pass",
"def flyc_nofly_extract(po, fwmdlfile):\n (po.nfzone_pos, po.nfzone_count) = flyc_nofly_zone_pos_search(po, fwmdlfile, 0, po.expect_func_align, po.expect_data_align, po.min_match_accepted)\n if po.nfzone_pos < 0:\n raise ValueError(\"Flight controller no fly zones array signature not detected in input file.\")\n (po.nfcord_pos, po.nfcord_count) = flyc_nofly_cord_pos_search(po, fwmdlfile, 0, po.expect_func_align, po.expect_data_align, po.min_match_accepted)\n if po.nfcord_pos < 0:\n raise ValueError(\"Flight controller no fly coords array signature not detected in input file.\")\n nfzones = flyc_nofly_merged_zones_array(po, fwmdlfile)\n if (po.verbose > 0):\n print(\"{}: Creating JSON file...\".format(po.mdlfile))\n inffile = open(po.inffile, \"w\")\n inffile.write(\"{\\\"release_limits\\\":[\\n\")\n i = 0\n for parprop in nfzones:\n inffile.write(\"{\")\n for ppname in ('area_id','type','shape',):\n inffile.write(\"\\\"{:s}\\\":{:d}\".format(ppname,parprop[ppname]))\n inffile.write(\",\")\n for ppname in ('lat','lng',):\n inffile.write(\"\\\"{:s}\\\":{:06f}\".format(ppname,parprop[ppname]))\n inffile.write(\",\")\n for ppname in ('radius','warning','level','disable','updated_at','begin_at','end_at',):\n inffile.write(\"\\\"{:s}\\\":{:d}\".format(ppname,parprop[ppname]))\n inffile.write(\",\")\n for ppname in ('name',):\n inffile.write(\"\\\"{:s}\\\":\\\"{:s}\\\"\".format(ppname,parprop[ppname]))\n inffile.write(\",\")\n for ppname in ('storage','country',):\n inffile.write(\"\\\"{:s}\\\":{:d}\".format(ppname,parprop[ppname]))\n inffile.write(\",\")\n for ppname in ('city',):\n inffile.write(\"\\\"{:s}\\\":\\\"{:s}\\\"\".format(ppname,parprop[ppname]))\n inffile.write(\",\")\n for ppname in ('points',):\n inffile.write(\"\\\"{:s}\\\":{:s}\".format(ppname,parprop[ppname] if parprop[ppname] is not None else \"null\"))\n if (i+1 < len(nfzones)):\n inffile.write(\"},\\n\")\n else:\n inffile.write(\"}\\n\")\n i += 1\n inffile.write(\"]}\\n\")\n inffile.close()\n if (po.verbose > 0):\n print(\"{}: Done exporting.\".format(po.mdlfile))",
"def make_eph():\n\n # Get table data:\n tr_file = 'exoplanets_transiting.fits'\n if os.path.isfile( tr_file )==False:\n tutilities.download_data()\n t = atpy.Table( tr_file )\n\n # Open and prepare file for output writing to:\n eph_file_w = open( EPH_FILE, 'w' )\n header_str = '# Transiting planet positions and epochs \\n'\n header_str += '# Generated from exoplanet.org data \\n'\n header_str += '# Comment out those not needed \\n\\n'\n header_str += '# COLUMNS: \\n'\n header_str += '# Name, Vmag, RA, Dec, Epoch(HJD), Period(days), Duration(hrs) \\n\\n'\n eph_file_w.write( header_str )\n\n # Go through each of the planets alphabetically and extract\n # the necessary information:\n q = np.argsort( t.NAME )\n for i in range( t.NAME.size ):\n eph_file_w.write( '%-12.10s %.1f %s %s %15.7f %13.8f %8.4f \\n' % \\\n ( t.NAME[ q[i] ].replace(' ',''), t.V[ q[i] ], t.RA_STRING[ q[i] ], \\\n t.DEC_STRING[ q[i] ], t.TT[ q[i] ], t.PER[ q[i] ], t.T14[ q[i] ]*24. ) )\n eph_file_w.close()\n print '\\n\\nSaved output in %s' % EPH_FILE\n\n return None",
"def o2sat(s, pt):\n\n t = sw.T68conv(pt) + Kelvin\n # Eqn (4) of Weiss 1970 (the constants are used for units of ml O2/kg).\n a = (-177.7888, 255.5907, 146.4813, -22.2040)\n b = (-0.037362, 0.016504, -0.0020564)\n lnC = (a[0] + a[1] * (100. / t) + a[2] * np.log(t / 100.) + a[3] *\n (t / 100.) +\n s * (b[0] + b[1] * (t / 100.) + b[2] * (t / 100.) ** 2))\n osat = np.exp(lnC) * 1000. / 22.392 # Convert from ml/kg to um/kg.\n\n \"\"\"The Apparent Oxygen Utilization (AOU) value was obtained by subtracting\n the measured value from the saturation value computed at the potential\n temperature of water and 1 atm total pressure using the following\n expression based on the data of Murray and Riley (1969):\n\n ln(O2 in µmol/kg) = - 173.9894 + 255.5907(100/TK) + 146.4813 ln(TK/100) -\n 22.2040(TK/100) + Sal [-0.037362 + 0.016504(TK/100) - 0.0020564(TK/100)2],\n where TK is temperature in °K and Sal in the Practical Salinity (SP) scale.\n \"\"\"\n return osat",
"def eeg_writeavr(array,tsb,di,file):\t\t\n import shutil as shu\n f=open(file,'w')\n firstline = 'Npts= %i TSB= %i DI= %7.5f SB= %7.5f SC= %i NChan= %i\\n' %(array.shape[1],tsb,di,1,200,array.shape[0]) \n chnam = 'Cz FP1 FP2 F3 F4 C3 C4 P3 P4 O1 O2 F7 F8 T7 T8 P7 P8 Fz Pz FC1 FC2 CP1 CP2 FC5 FC6 CP5 CP6 FT9 FT10 TP9 TP10 PO9 PO10\\n'\n f.write(firstline)\n f.write(chnam)\n for i in range(array.shape[0]):\n tmp = array[i,:]\n f.write(('%7.5f ' * len(tmp)) %tuple(tmp))\n f.write('\\n')\n \n f.close()\n #may want to change this on different machines...\n src = '/Users/crislanting/Projects/EEG/data/33.elp'\n dest = file[:-4] + '.elp'\n shu.copyfile(src,dest)",
"def parse_aurinkopenkki(hex_str, port=None):\n b = bytes.fromhex(hex_str)\n val = struct.unpack('<BbxxfffIIIII', b)\n\n #struct t_AcudcDATA { \n # uint8_t msg_type;\n # uint8_t msg_ver;\n # float volt;\n # float amp;\n # float watt;\n # uint32_t runTime;\n # uint32_t inEnergy;\n # uint32_t outEnergy;\n # uint32_t inAh;\n # uint32_t outAh;\n \n data = {\n 'voltage': val[2],\n 'current': val[3],\n 'power': val[4],\n 'runtime': val[5],\n 'inEnergy': val[6],\n 'outEnergy': val[7],\n 'inmAh': val[8],\n 'outmAh': val[9],\n }\n return data",
"def export_db_macinfolib(db, path, year):\n\n with open(path, 'w') as fh:\n print('// DO NOT EDIT! This is an autogenerated file.', file=fh)\n print('#include \"MacInfoInternal.h\"', file=fh)\n print('CONST MAC_INFO_INTERNAL_ENTRY gMacInfoModels[] = {', file=fh)\n\n for info in db:\n if max(info['AppleModelYear']) < year:\n continue\n\n print(' {\\n'\n ' .SystemProductName = \"%s\",\\n'\n ' .BoardProduct = \"%s\",\\n'\n ' .BoardRevision = %s,\\n'\n ' .SmcRevision = {%s},\\n'\n ' .SmcBranch = {%s},\\n'\n ' .SmcPlatform = {%s},\\n'\n ' .BIOSVersion = \"%s\",\\n'\n ' .BIOSReleaseDate = \"%s\",\\n'\n ' .SystemVersion = \"%s\",\\n'\n ' .SystemSKUNumber = \"%s\",\\n'\n ' .SystemFamily = \"%s\",\\n'\n ' .BoardVersion = \"%s\",\\n'\n ' .BoardAssetTag = \"%s\",\\n'\n ' .BoardLocationInChassis = \"%s\",\\n'\n ' .SmcGeneration = 0x%X,\\n'\n ' .BoardType = 0x%X,\\n'\n ' .ChassisType = 0x%X,\\n'\n ' .MemoryFormFactor = 0x%X,\\n'\n ' .PlatformFeature = %s,\\n'\n ' .ChassisAssetTag = \"%s\",\\n'\n ' .FirmwareFeatures = 0x%XULL,\\n'\n ' .FirmwareFeaturesMask = 0x%XULL,\\n'\n ' },' % (\n info['SystemProductName'],\n info['BoardProduct'][0] if isinstance(info['BoardProduct'], list) else info['BoardProduct'],\n '0x{:X}'.format(info['BoardRevision']) if 'BoardRevision' in info else 'MAC_INFO_BOARD_REVISION_MISSING',\n ', '.join(map(str, info.get('SmcRevision', [0x00]))),\n ', '.join(map(str, info.get('SmcBranch', [0x00]))),\n ', '.join(map(str, info.get('SmcPlatform', [0x00]))),\n info['BIOSVersion'],\n info['BIOSReleaseDate'],\n info['SystemVersion'],\n info['SystemSKUNumber'],\n info['SystemFamily'],\n info['BoardVersion'],\n info['BoardAssetTag'],\n info['BoardLocationInChassis'],\n info['SmcGeneration'],\n info['BoardType'],\n info['ChassisType'],\n info['MemoryFormFactor'],\n '0x{:X}'.format(info['PlatformFeature']) if 'PlatformFeature' in info else 'MAC_INFO_PLATFORM_FEATURE_MISSING',\n info['ChassisAssetTag'],\n info.get('ExtendedFirmwareFeatures', info.get('FirmwareFeatures', 0)),\n info.get('ExtendedFirmwareFeaturesMask', info.get('FirmwareFeaturesMask', 0))\n ), file=fh)\n\n print('};', file=fh)\n\n print('CONST UINTN gMacInfoModelCount = ARRAY_SIZE (gMacInfoModels);', file=fh)\n print('CONST UINTN gMacInfoDefaultModel = 0;', file=fh)",
"def LoadAirplane():\n return vtkInterface.PolyData(planefile)",
"def ports(osm_path): \n return (retrieve(osm_path,'multipolygons',['landuse'],**{'landuse':[\"='industrial' or \",\"='port' or \",\"='harbour'\"]})).rename(columns={'landuse': 'asset'})",
"def date_to_iso(string):\r\n\r\n # disregard tokenisation, if it's there, to make this an easier conversion for GUTime\r\n string = re.sub(r'<([^~]*)~.+?>', r'\\1 ', string)\r\n\r\n # Defaults\r\n d = None\r\n m = None\r\n y = None\r\n h = None\r\n min = None\r\n s = None\r\n fs = None\r\n zone = None\r\n\r\n # ACE format\r\n match = re.search(r'(\\d\\d\\d\\d\\d\\d\\d\\d:\\d\\d\\d\\d)', re.sub('\\s', '', string))\r\n if match is not None:\r\n d = match.group(1)\r\n d = re.sub(r':', r'T', d)\r\n return d\r\n\r\n # Already in ISO format\r\n match = re.search(r'(\\d\\d\\d\\d-?\\d\\d-?\\d\\d)(-?(T\\d\\d(:?\\d\\d)?(:?\\d\\d)?([+-]\\d{1,4})?))?', re.sub('\\s', '', string))\r\n if match is not None:\r\n d = match.group(1)\r\n d = re.sub(r'-', r'', d)\r\n h = match.group(3)\r\n if h is not None:\r\n h = re.sub(r':', r'', h)\r\n return d + h\r\n else:\r\n return d\r\n\r\n # some pre-processing\r\n match = re.search('T\\d\\d(:?\\d\\d)?(:?\\d\\d)?([+-]\\d{1,4})?', re.sub('\\s', '', string))\r\n if match is not None:\r\n return re.sub(r':', r'', re.sub('\\s', '', string))\r\n\r\n # extract date\r\n if re.search(\r\n r'(\\d\\d?|' + expressions.ORDINAL_WORDS + r'|' + expressions.ORDINAL_NUMS + r')\\s+'\r\n r'(' + expressions.MONTHS + r'|' + expressions.MONTH_ABBRS + r'\\s*\\.?)\\s*,?\\s+(\\d\\d(\\s|\\Z)|\\d{4}\\b)',\r\n string, re.I) is not None:\r\n match = re.search(\r\n r'(\\d\\d?|' + expressions.ORDINAL_WORDS + r'|' + expressions.ORDINAL_NUMS + r')\\s+'\r\n r'(' + expressions.MONTHS + r'|' + expressions.MONTH_ABBRS + r'\\s*\\.?)\\s*,?\\s+(\\d\\d(\\s|\\Z)|\\d{4}\\b)',\r\n string, re.I)\r\n d = ordinal_to_num(match.group(1))\r\n m = month_to_num(match.group(5))\r\n y = match.group(7)\r\n\r\n elif re.search(\r\n r'(' + expressions.MONTHS + r'|' + expressions.MONTH_ABBRS + r'\\s*\\.?)\\s+'\r\n r'(\\d\\d?|' + expressions.ORDINAL_WORDS + r'|' + expressions.ORDINAL_NUMS + r')\\b,?\\s*(\\d\\d(\\s|\\Z)|\\d{4}\\b)',\r\n string, re.I) is not None:\r\n match = re.search(\r\n r'(' + expressions.MONTHS + r'|' + expressions.MONTH_ABBRS + r'\\s*\\.?)\\s+'\r\n r'(\\d\\d?|' + expressions.ORDINAL_WORDS + r'|' + expressions.ORDINAL_NUMS + r')\\b,?\\s*(\\d\\d(\\s|\\Z)|\\d{4}\\b)',\r\n string, re.I)\r\n d = ordinal_to_num(match.group(4))\r\n m = month_to_num(match.group(1))\r\n y = match.group(7)\r\n\r\n elif re.search(r'(\\d\\d\\d\\d)(\\/|\\-)(\\d\\d?)\\2(\\d\\d?)', re.sub('\\s', '', string)) is not None:\r\n match = re.search(r'(\\d\\d\\d\\d)(\\/|\\-)(\\d\\d?)\\2(\\d\\d?)', re.sub('\\s', '', string))\r\n m = match.group(3)\r\n d = match.group(4)\r\n y = match.group(1)\r\n\r\n elif re.search(r'(\\d\\d?)(\\/|\\-|\\.)(\\d\\d?)\\2(\\d\\d(\\d\\d)?)', re.sub('\\s', '', string)) is not None:\r\n match = re.search(r'(\\d\\d?)(\\/|\\-|\\.)(\\d\\d?)\\2(\\d\\d(\\d\\d)?)', re.sub('\\s', '', string))\r\n m = match.group(1)\r\n d = match.group(3)\r\n y = match.group(4)\r\n\r\n if y is not None:\r\n # check for European style date\r\n if 12 < int(m) <= 31 and int(d) <= 12:\r\n new_d = m\r\n m = d\r\n d = new_d\r\n\r\n # check for 2 digit year\r\n y = normalise_two_digit_year(str(y))\r\n\r\n iso = \"%4d%02d%02d\" % (int(y), int(m), int(d))\r\n\r\n else:\r\n iso = \"XXXXXXXX\"\r\n\r\n # Extract time\r\n match = re.search(r'(\\d?\\d):(\\d\\d)(:(\\d\\d)(\\.\\d+)?)?(([AP])\\.?M\\.?)?(([+\\-]\\d+|[A-Z][SD]T|GMT([+\\-]\\d+)?))?',\r\n re.sub('\\s', '', string), re.I)\r\n if match is not None:\r\n h = match.group(1)\r\n min = match.group(2)\r\n s = match.group(4)\r\n fs = match.group(5)\r\n ampm = match.group(7)\r\n zone = 
match.group(9)\r\n\r\n if ampm is not None and ampm[0].lower() == 'p':\r\n h = str(int(h) + 12)\r\n\r\n if zone is not None:\r\n zm = re.search(r'(GMT)([+\\-]\\d+)', zone)\r\n if zm is not None:\r\n zone = zm.group(2)\r\n elif zone.lower().find('gmt') > -1:\r\n zone = 'Z'\r\n elif re.search(r'([A-Z])([SD])T', zone) is not None:\r\n zm = re.search(r'([A-Z])([SD])T', zone)\r\n # Timezone offsets from GMT\r\n timezones = {\r\n \"R\": 1,\r\n \"E\": -5,\r\n \"C\": -6,\r\n \"M\": -7,\r\n \"P\": -8\r\n }\r\n if zm.group(1).upper() in timezones:\r\n zone = timezones[zm.group(1).upper()]\r\n if zm.group(2).lower() == 'd':\r\n zone += 1\r\n if zone < 0:\r\n zone = '-%02d00' % (-1 * zone)\r\n else:\r\n zone = '+%02d00' % zone\r\n elif re.search(r'(\\d\\d)(\\d\\d)\\s+(h(ou)?rs?|(on\\s+)?\\d\\d?\\/\\d)', string, re.I) is not None:\r\n match = re.search(r'(\\d\\d)(\\d\\d)\\s+(h(ou)?rs?|(on\\s+)?\\d\\d?\\/\\d)', string, re.I)\r\n h = match.group(1)\r\n min = match.group(2)\r\n\r\n if h is not None:\r\n if fs is not None:\r\n fs = re.sub(r'\\.', r'', fs)\r\n iso += 'T%02d%02d%02d.%02d' % (int(h), int(min), int(s), int(fs))\r\n elif s is not None:\r\n iso += 'T%02d%02d%02d' % (int(h), int(min), int(s))\r\n elif min is not None:\r\n iso += 'T%02d%02d' % (int(h), int(min))\r\n\r\n if zone is not None:\r\n iso += zone.lstrip()\r\n\r\n return iso",
"def ROCKSTAR_ASCII_list():\n \n dt= [\n ('haloid' , np.int64), #ID\n ('descIndex' , np.int64), #DescID\n ('mhalo' , np.float32), #Mvir\n ('vmax' , np.float32), #Vmax\n ('vrms' , np.float32), #Vrms \n ('rvir' , np.float32), #Rvir\n ('rscale' , np.float32), #Rs\n ('n_particles' , np.int64), #Np \n ('x_pos' , np.float32), #x\n ('y_pos' , np.float32), #y\n ('z_pos' , np.float32), #z\n ('x_vel' , np.float32), #vx\n ('y_vel' , np.float32), #vy\n ('z_vel' , np.float32), #vz\n ('x_ang' , np.float32), #Jx\n ('y_ang' , np.float32), #Jy\n ('z_ang' , np.float32), #Jz\n ('spinParameter' , np.float32), #Spin\n ('rscale_Klypin' , np.float32), #rs_Klypin \n ('mbasic' , np.float32), #Mvir_all\n ('mhalo_200b' , np.float32), #m200b \n ('mhalo_200c' , np.float32), #m200c\n ('mhalo_500c' , np.float32), #m500c\n ('mhalo_2500c' , np.float32), #m2500c\n ('x_off' , np.float32), #Xoff\n ('v_off' , np.float32), #Yoff\n ('spin_Bullock' , np.float32), #spin_bullock\n ('b_to_a' , np.float32), #b_to_a \n ('c_to_a' , np.float32), #c_to_a\n ('x_a' , np.float32), #A[x]\n ('y_a' , np.float32), #A[y]\n ('z_a' , np.float32), #A[z] \n ('b_to_a_500c' , np.float32), #b_to_a(500c)\n ('c_to_a_500c' , np.float32), #c_to_a(500c) \n ('x_a_500c' , np.float32), #A[x](500c) \n ('y_a_500c' , np.float32), #A[y](500c) \n ('z_a_500c' , np.float32), #A[z](500c)\n ('T_U' , np.float32), #T/|U|\n ('Mpseudo_Behroozi', np.float32), #M_pe_Behroozi\n ('Mpseudo_Diemer' , np.float32), #M_pe_Diemer\n ('rhalf_mass' , np.float32) #Halfmass_Radius \n ] \n return dt",
"def DARP2016_Acoular_XML():\n XYZ_array, array_cal = DARP2016_MicArray()\n array_name = \"DARP2016\"\n array_pos = XML_mic(XYZ_array)\n array_file = '<?xml version=\"1.0\" encoding=\"utf-8\"?>\\n<MicArray name=\"{name}\">\\n{pos}</MicArray>'.format(\n name=array_name, pos=array_pos)\n calib_data = XML_calib(array_cal, 100)\n calib_file = '<?xml version=\"1.0\" encoding=\"utf-8\"?>\\n<Calib name=\"{name}\">\\n{data}</Calib>'.format(\n name=20160101, data=calib_data)\n\n save_XML(array_name, array_file)\n save_XML(array_name+\"_calib\", calib_file)",
"def format(self, data):",
"def test_10_to_11_conversion(self) -> None:\n original_gpx = mod_gpx.GPX()\n original_gpx.creator = 'cr'\n original_gpx.name = 'q'\n original_gpx.description = 'w'\n original_gpx.time = mod_datetime.datetime(2014, 4, 7, 21, 17, 39, tzinfo=mod_gpxfield.SimpleTZ())\n original_gpx.bounds = mod_gpx.GPXBounds(1, 2, 3, 4)\n original_gpx.author_name = '789'\n original_gpx.author_email = '256@aaa'\n original_gpx.link = 'http://9890'\n original_gpx.link_text = '77888'\n original_gpx.keywords = 'kw'\n\n original_waypoint = mod_gpx.GPXWaypoint()\n original_waypoint.latitude = 12.3\n original_waypoint.longitude = 13.4\n original_waypoint.elevation = 121.89\n original_waypoint.time = mod_datetime.datetime(2015, 5, 8, 21, 17, 39, tzinfo=mod_gpxfield.SimpleTZ())\n original_waypoint.magnetic_variation = 1\n original_waypoint.geoid_height = 1\n original_waypoint.name = 'n'\n original_waypoint.comment = 'cm'\n original_waypoint.description = 'des'\n original_waypoint.source = 'src'\n original_waypoint.symbol = 'sym'\n original_waypoint.type = 'ty'\n original_waypoint.type_of_gpx_fix = 'dgps'\n original_waypoint.satellites = 13\n original_waypoint.horizontal_dilution = 14\n original_waypoint.vertical_dilution = 15\n original_waypoint.position_dilution = 16\n original_waypoint.age_of_dgps_data = 16\n original_waypoint.dgps_id = 17\n original_gpx.waypoints.append(original_waypoint)\n\n original_route = mod_gpx.GPXRoute()\n original_route.name = 'rten'\n original_route.comment = 'rtecm'\n original_route.description = 'rtedesc'\n original_route.source = 'rtesrc'\n # TODO url\n original_route.number = 101\n\n original_route_points = mod_gpx.GPXRoutePoint()\n original_route_points.latitude = 34.5\n original_route_points.longitude = 56.6\n original_route_points.elevation = 1001\n original_route_points.time = mod_datetime.datetime(2015, 5, 8, 21, 17, 17, tzinfo=mod_gpxfield.SimpleTZ())\n original_route_points.magnetic_variation = 12\n original_route_points.geoid_height = 13\n original_route_points.name = 'aaaaa'\n original_route_points.comment = 'wwww'\n original_route_points.description = 'cccc'\n original_route_points.source = 'qqq'\n # TODO url\n original_route_points.symbol = 'a.png'\n original_route_points.type = '2'\n original_route_points.type_of_gpx_fix = 'pps'\n original_route_points.satellites = 23\n original_route_points.horizontal_dilution = 19\n original_route_points.vertical_dilution = 20\n original_route_points.position_dilution = 21\n original_route_points.age_of_dgps_data = 22\n original_route_points.dgps_id = 23\n original_route.points.append(original_route_points)\n original_gpx.routes.append(original_route)\n\n original_track = mod_gpx.GPXTrack()\n original_track.name = 'rten'\n original_track.comment = 'rtecm'\n original_track.description = 'rtedesc'\n original_track.source = 'rtesrc'\n # TODO url\n original_track.number = 101\n\n original_track_point = mod_gpx.GPXTrackPoint()\n original_track_point.latitude = 34.6\n original_track_point.longitude = 57.6\n original_track_point.elevation = 1002\n original_track_point.time = mod_datetime.datetime(2016, 5, 8, 21, 17, 17, tzinfo=mod_gpxfield.SimpleTZ())\n original_track_point.magnetic_variation = 13\n original_track_point.geoid_height = 14\n original_track_point.name = 'aaaaajkjk'\n original_track_point.comment = 'wwwwii'\n original_track_point.description = 'ciccc'\n original_track_point.source = 'qssqq'\n # TODO url\n original_track_point.symbol = 'ai.png'\n original_track_point.type = '3'\n original_track_point.type_of_gpx_fix = 'pps'\n 
original_track_point.satellites = 24\n original_track_point.horizontal_dilution = 20\n original_track_point.vertical_dilution = 21\n original_track_point.position_dilution = 22\n original_track_point.age_of_dgps_data = 23\n original_track_point.dgps_id = 22\n\n original_track.segments.append(mod_gpx.GPXTrackSegment())\n original_track.segments[0].points.append(original_track_point)\n\n original_gpx.tracks.append(original_track)\n\n # Convert do GPX1.0:\n xml_10 = original_gpx.to_xml('1.0')\n print(xml_10)\n self.assertTrue('http://www.topografix.com/GPX/1/0' in xml_10)\n #pretty_print_xml(xml_10)\n gpx_1 = mod_gpxpy.parse(xml_10)\n\n # Convert do GPX1.1:\n xml_11 = gpx_1.to_xml('1.1')\n print(xml_11)\n self.assertTrue('http://www.topografix.com/GPX/1/1' in xml_11 and 'metadata' in xml_11)\n #pretty_print_xml(xml_11)\n gpx_2 = mod_gpxpy.parse(xml_11)\n\n # Convert do GPX1.0 again:\n xml_10 = gpx_2.to_xml('1.0')\n self.assertTrue('http://www.topografix.com/GPX/1/0' in xml_10)\n #pretty_print_xml(xml_10)\n gpx_3 = mod_gpxpy.parse(xml_10)\n\n for gpx in (gpx_1, gpx_2, gpx_3, ):\n self.assertTrue(gpx.creator is not None)\n self.assertEqual(original_gpx.creator, gpx.creator)\n\n self.assertTrue(gpx.name is not None)\n self.assertEqual(original_gpx.name, gpx.name)\n\n self.assertTrue(gpx.description is not None)\n self.assertEqual(original_gpx.description, gpx.description)\n\n self.assertTrue(gpx.keywords is not None)\n self.assertEqual(original_gpx.keywords, gpx.keywords)\n\n self.assertTrue(gpx.time is not None)\n self.assertEqual(original_gpx.time, gpx.time)\n\n self.assertTrue(gpx.author_name is not None)\n self.assertEqual(original_gpx.author_name, gpx.author_name)\n\n self.assertTrue(gpx.author_email is not None)\n self.assertEqual(original_gpx.author_email, gpx.author_email)\n\n self.assertTrue(gpx.link is not None)\n self.assertEqual(original_gpx.link, gpx.link)\n\n self.assertTrue(gpx.link_text is not None)\n self.assertEqual(original_gpx.link_text, gpx.link_text)\n\n self.assertTrue(gpx.bounds is not None)\n self.assertEqual(tuple(original_gpx.bounds), tuple(gpx.bounds)) # type: ignore\n\n self.assertEqual(1, len(gpx.waypoints))\n\n self.assertTrue(gpx.waypoints[0].latitude is not None)\n self.assertEqual(original_gpx.waypoints[0].latitude, gpx.waypoints[0].latitude)\n\n self.assertTrue(gpx.waypoints[0].longitude is not None)\n self.assertEqual(original_gpx.waypoints[0].longitude, gpx.waypoints[0].longitude)\n\n self.assertTrue(gpx.waypoints[0].elevation is not None)\n self.assertEqual(original_gpx.waypoints[0].elevation, gpx.waypoints[0].elevation)\n\n self.assertTrue(gpx.waypoints[0].time is not None)\n self.assertEqual(original_gpx.waypoints[0].time, gpx.waypoints[0].time)\n\n self.assertTrue(gpx.waypoints[0].magnetic_variation is not None)\n self.assertEqual(original_gpx.waypoints[0].magnetic_variation, gpx.waypoints[0].magnetic_variation)\n\n self.assertTrue(gpx.waypoints[0].geoid_height is not None)\n self.assertEqual(original_gpx.waypoints[0].geoid_height, gpx.waypoints[0].geoid_height)\n\n self.assertTrue(gpx.waypoints[0].name is not None)\n self.assertEqual(original_gpx.waypoints[0].name, gpx.waypoints[0].name)\n\n self.assertTrue(gpx.waypoints[0].comment is not None)\n self.assertEqual(original_gpx.waypoints[0].comment, gpx.waypoints[0].comment)\n\n self.assertTrue(gpx.waypoints[0].description is not None)\n self.assertEqual(original_gpx.waypoints[0].description, gpx.waypoints[0].description)\n\n self.assertTrue(gpx.waypoints[0].source is not None)\n 
self.assertEqual(original_gpx.waypoints[0].source, gpx.waypoints[0].source)\n\n # TODO: Link/url\n\n self.assertTrue(gpx.waypoints[0].symbol is not None)\n self.assertEqual(original_gpx.waypoints[0].symbol, gpx.waypoints[0].symbol)\n\n self.assertTrue(gpx.waypoints[0].type is not None)\n self.assertEqual(original_gpx.waypoints[0].type, gpx.waypoints[0].type)\n\n self.assertTrue(gpx.waypoints[0].type_of_gpx_fix is not None)\n self.assertEqual(original_gpx.waypoints[0].type_of_gpx_fix, gpx.waypoints[0].type_of_gpx_fix)\n\n self.assertTrue(gpx.waypoints[0].satellites is not None)\n self.assertEqual(original_gpx.waypoints[0].satellites, gpx.waypoints[0].satellites)\n\n self.assertTrue(gpx.waypoints[0].horizontal_dilution is not None)\n self.assertEqual(original_gpx.waypoints[0].horizontal_dilution, gpx.waypoints[0].horizontal_dilution)\n\n self.assertTrue(gpx.waypoints[0].vertical_dilution is not None)\n self.assertEqual(original_gpx.waypoints[0].vertical_dilution, gpx.waypoints[0].vertical_dilution)\n\n self.assertTrue(gpx.waypoints[0].position_dilution is not None)\n self.assertEqual(original_gpx.waypoints[0].position_dilution, gpx.waypoints[0].position_dilution)\n\n self.assertTrue(gpx.waypoints[0].age_of_dgps_data is not None)\n self.assertEqual(original_gpx.waypoints[0].age_of_dgps_data, gpx.waypoints[0].age_of_dgps_data)\n\n self.assertTrue(gpx.waypoints[0].dgps_id is not None)\n self.assertEqual(original_gpx.waypoints[0].dgps_id, gpx.waypoints[0].dgps_id)\n\n # route(s):\n\n self.assertTrue(gpx.routes[0].name is not None)\n self.assertEqual(original_gpx.routes[0].name, gpx.routes[0].name)\n\n self.assertTrue(gpx.routes[0].comment is not None)\n self.assertEqual(original_gpx.routes[0].comment, gpx.routes[0].comment)\n\n self.assertTrue(gpx.routes[0].description is not None)\n self.assertEqual(original_gpx.routes[0].description, gpx.routes[0].description)\n\n self.assertTrue(gpx.routes[0].source is not None)\n self.assertEqual(original_gpx.routes[0].source, gpx.routes[0].source)\n\n self.assertTrue(gpx.routes[0].number is not None)\n self.assertEqual(original_gpx.routes[0].number, gpx.routes[0].number)\n\n self.assertTrue(gpx.routes[0].points[0].latitude is not None)\n self.assertEqual(original_gpx.routes[0].points[0].latitude, gpx.routes[0].points[0].latitude)\n\n self.assertTrue(gpx.routes[0].points[0].longitude is not None)\n self.assertEqual(original_gpx.routes[0].points[0].longitude, gpx.routes[0].points[0].longitude)\n\n self.assertTrue(gpx.routes[0].points[0].elevation is not None)\n self.assertEqual(original_gpx.routes[0].points[0].elevation, gpx.routes[0].points[0].elevation)\n\n self.assertTrue(gpx.routes[0].points[0].time is not None)\n self.assertEqual(original_gpx.routes[0].points[0].time, gpx.routes[0].points[0].time)\n\n self.assertTrue(gpx.routes[0].points[0].magnetic_variation is not None)\n self.assertEqual(original_gpx.routes[0].points[0].magnetic_variation, gpx.routes[0].points[0].magnetic_variation)\n\n self.assertTrue(gpx.routes[0].points[0].geoid_height is not None)\n self.assertEqual(original_gpx.routes[0].points[0].geoid_height, gpx.routes[0].points[0].geoid_height)\n\n self.assertTrue(gpx.routes[0].points[0].name is not None)\n self.assertEqual(original_gpx.routes[0].points[0].name, gpx.routes[0].points[0].name)\n\n self.assertTrue(gpx.routes[0].points[0].comment is not None)\n self.assertEqual(original_gpx.routes[0].points[0].comment, gpx.routes[0].points[0].comment)\n\n self.assertTrue(gpx.routes[0].points[0].description is not None)\n 
self.assertEqual(original_gpx.routes[0].points[0].description, gpx.routes[0].points[0].description)\n\n self.assertTrue(gpx.routes[0].points[0].source is not None)\n self.assertEqual(original_gpx.routes[0].points[0].source, gpx.routes[0].points[0].source)\n\n self.assertTrue(gpx.routes[0].points[0].symbol is not None)\n self.assertEqual(original_gpx.routes[0].points[0].symbol, gpx.routes[0].points[0].symbol)\n\n self.assertTrue(gpx.routes[0].points[0].type is not None)\n self.assertEqual(original_gpx.routes[0].points[0].type, gpx.routes[0].points[0].type)\n\n self.assertTrue(gpx.routes[0].points[0].type_of_gpx_fix is not None)\n self.assertEqual(original_gpx.routes[0].points[0].type_of_gpx_fix, gpx.routes[0].points[0].type_of_gpx_fix)\n\n self.assertTrue(gpx.routes[0].points[0].satellites is not None)\n self.assertEqual(original_gpx.routes[0].points[0].satellites, gpx.routes[0].points[0].satellites)\n\n self.assertTrue(gpx.routes[0].points[0].horizontal_dilution is not None)\n self.assertEqual(original_gpx.routes[0].points[0].horizontal_dilution, gpx.routes[0].points[0].horizontal_dilution)\n\n self.assertTrue(gpx.routes[0].points[0].vertical_dilution is not None)\n self.assertEqual(original_gpx.routes[0].points[0].vertical_dilution, gpx.routes[0].points[0].vertical_dilution)\n\n self.assertTrue(gpx.routes[0].points[0].position_dilution is not None)\n self.assertEqual(original_gpx.routes[0].points[0].position_dilution, gpx.routes[0].points[0].position_dilution)\n\n self.assertTrue(gpx.routes[0].points[0].age_of_dgps_data is not None)\n self.assertEqual(original_gpx.routes[0].points[0].age_of_dgps_data, gpx.routes[0].points[0].age_of_dgps_data)\n\n self.assertTrue(gpx.routes[0].points[0].dgps_id is not None)\n self.assertEqual(original_gpx.routes[0].points[0].dgps_id, gpx.routes[0].points[0].dgps_id)\n\n # track(s):\n\n self.assertTrue(gpx.tracks[0].name is not None)\n self.assertEqual(original_gpx.tracks[0].name, gpx.tracks[0].name)\n\n self.assertTrue(gpx.tracks[0].comment is not None)\n self.assertEqual(original_gpx.tracks[0].comment, gpx.tracks[0].comment)\n\n self.assertTrue(gpx.tracks[0].description is not None)\n self.assertEqual(original_gpx.tracks[0].description, gpx.tracks[0].description)\n\n self.assertTrue(gpx.tracks[0].source is not None)\n self.assertEqual(original_gpx.tracks[0].source, gpx.tracks[0].source)\n\n self.assertTrue(gpx.tracks[0].number is not None)\n self.assertEqual(original_gpx.tracks[0].number, gpx.tracks[0].number)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].latitude is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].latitude, gpx.tracks[0].segments[0].points[0].latitude)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].longitude is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].longitude, gpx.tracks[0].segments[0].points[0].longitude)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].elevation is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].elevation, gpx.tracks[0].segments[0].points[0].elevation)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].time is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].time, gpx.tracks[0].segments[0].points[0].time)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].magnetic_variation is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].magnetic_variation, gpx.tracks[0].segments[0].points[0].magnetic_variation)\n\n 
self.assertTrue(gpx.tracks[0].segments[0].points[0].geoid_height is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].geoid_height, gpx.tracks[0].segments[0].points[0].geoid_height)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].name is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].name, gpx.tracks[0].segments[0].points[0].name)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].comment is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].comment, gpx.tracks[0].segments[0].points[0].comment)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].description is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].description, gpx.tracks[0].segments[0].points[0].description)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].source is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].source, gpx.tracks[0].segments[0].points[0].source)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].symbol is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].symbol, gpx.tracks[0].segments[0].points[0].symbol)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].type is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].type, gpx.tracks[0].segments[0].points[0].type)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].type_of_gpx_fix is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].type_of_gpx_fix, gpx.tracks[0].segments[0].points[0].type_of_gpx_fix)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].satellites is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].satellites, gpx.tracks[0].segments[0].points[0].satellites)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].horizontal_dilution is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].horizontal_dilution, gpx.tracks[0].segments[0].points[0].horizontal_dilution)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].vertical_dilution is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].vertical_dilution, gpx.tracks[0].segments[0].points[0].vertical_dilution)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].position_dilution is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].position_dilution, gpx.tracks[0].segments[0].points[0].position_dilution)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].age_of_dgps_data is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].age_of_dgps_data, gpx.tracks[0].segments[0].points[0].age_of_dgps_data)\n\n self.assertTrue(gpx.tracks[0].segments[0].points[0].dgps_id is not None)\n self.assertEqual(original_gpx.tracks[0].segments[0].points[0].dgps_id, gpx.tracks[0].segments[0].points[0].dgps_id)",
"def toisostring(dt):\n return dt.format(ISOFORMAT) + 'Z'",
"def tozout(self, aline):\r\n # SetAusg(A120,5,A.St201_Y1_2_SwivelUnit_backward);\r\n #front|back|up|down|left|right\r\n pattern = re.compile(r'.*(?P<Var>A.*[sS]t(?P<StN>\\d+)_Y(?P<ZN>\\d+)_[24]_(?P<ZName>\\w+)_'\r\n r'(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\s*\\).*')\r\n match = pattern.match(aline)\r\n if match:\r\n #print('match')\r\n self.Var = match.group('Var')\r\n self.StN = match.group('StN')\r\n self.ZN = match.group('ZN')\r\n self.ZName = match.group('ZName')\r\n self.Motion = match.group('Motion')\r\n # if re.compile(r'^up|down|left|right$').match(self.Motion):\r\n # self.Motion = self.Motion+'ward'\r\n # obj = re.compile(r'up|down|left|right')\r\n # if obj.match(self.Motion):\r\n # print('match')\r\n # self.Motion = obj.subn('ward',self.Motion)[0]\r\n self.Motion = re.sub(r'^(up|down|left|right)$',r'\\1ward', self.Motion)\r\n isgrippermatch = re.compile(r'.*(open|close).*').match(aline)\r\n if isgrippermatch:\r\n self.Ztype = 'gripper'\r\n else:\r\n self.Ztype = 'not gripper'",
"def _write(self):\n f = FortranFile(self.filename,mode='w')\n # Default omnivor binary header\n f.writeInts ( self.data['MK'] , 'i' ) \n f.writeInts ( self.data['itime'] , 'i' ) \n f.writeString ( self.data['version'] ) \n f.writeInts ( self.data['file_id'] , 'i' ) \n f.writeString ( self.data['sversion'] ) \n # Velocity field\n f.writeString ( self.data['stype'] ) \n f.writeInts ( self.data['is_grid'] , 'i' ) \n f.writeInts ( self.data['nCPs'] , 'i' ) \n if self.data['MK'] == 8:\n real_char='d'\n else:\n real_char='f'\n if self.data['is_grid']:\n f.writeInts ( self.data['n1'] , 'i' ) \n f.writeInts ( self.data['n2'] , 'i' ) \n f.writeInts ( self.data['n3'] , 'i' ) \n f.writeInts ( self.data['is_straight'] , 'i' ) \n f.writeReals ( self.data['v1'] , real_char ) \n f.writeReals ( self.data['v2'] , real_char ) \n f.writeReals ( self.data['v3'] , real_char ) \n\n CPs = self.data['CPs'].flatten(order = 'F')\n Utot = self.data['Utot'].flatten(order = 'F')\n f.writeReals(CPs,real_char)\n f.writeReals(Utot,real_char)",
"def to_mac(self):\n ts_type = self.ts_types['mac']\n try:\n dt_obj = duparser.parse(self.timestamp)\n if hasattr(dt_obj.tzinfo, '_offset'):\n dt_tz = dt_obj.tzinfo._offset.total_seconds()\n dt_obj = duparser.parse(self.timestamp, ignoretz=True)\n else:\n dt_tz = 0\n self.out_mac = str(int((dt_obj - self.epoch_2001).total_seconds() - int(dt_tz)))\n ts_output = str(\"{}\\t\\t{}\".format(ts_type, self.out_mac))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_mac = ts_output = False\n return self.out_mac, ts_output",
"def to_oskar_telescope_model(self, filename):\n pass",
"def aut2HOA(aut):\n state_cnt = 0\n state_transl_dict = dict()\n\n ###########################################\n def state_transl(state):\n \"\"\"state_transl(state) -> int\n\n Translates state names into numbers.\n \"\"\"\n nonlocal state_cnt\n nonlocal state_transl_dict\n\n if state not in state_transl_dict.keys():\n state_transl_dict[state] = state_cnt\n state_cnt += 1\n\n return str(state_transl_dict[state])\n ###########################################\n\n symb_cnt = 0\n symb_transl_dict = dict()\n\n ###########################################\n def symb_transl(symb):\n \"\"\"symb_transl(symb) -> int\n\n Translates symbol names into numbers.\n \"\"\"\n nonlocal symb_cnt\n nonlocal symb_transl_dict\n\n if symb not in symb_transl_dict.keys():\n symb_transl_dict[symb] = symb_cnt\n symb_cnt += 1\n\n return str(symb_transl_dict[symb])\n ###########################################\n\n # count states and transitions\n for st in aut[\"initial\"]:\n state_transl(st)\n for trans in aut[\"transitions\"]:\n src, symb, tgt = trans\n state_transl(src)\n symb_transl(symb)\n state_transl(tgt)\n for st in aut[\"final\"]:\n state_transl(st)\n\n res = \"\"\n res += \"HOA: v1\\n\"\n res += \"States: {}\\n\".format(state_cnt)\n\n res += \"Start: \"\n for state in aut[\"initial\"]:\n res += state_transl(state) + \" \"\n res += \"\\n\"\n\n # magic setting for Buchi condition\n res += \"acc-name: Buchi\\n\"\n res += \"Acceptance: 1 Inf(0)\\n\"\n\n # atomic propositions\n res += \"AP: {}\".format(symb_cnt)\n for i in range(symb_cnt):\n for key in symb_transl_dict:\n if symb_transl_dict[key] == i:\n res += \" \\\"{}\\\"\".format(key)\n res += \"\\n\"\n\n res += \"--BODY--\\n\"\n for (name, num) in state_transl_dict.items():\n res += \"State: {}\".format(num)\n if name in aut[\"final\"]:\n res += \" { 0 }\"\n res += \"\\n\"\n\n for trans in aut[\"transitions\"]:\n src, symb, tgt = trans\n if src == name:\n res += \" [\"\n for i in range(symb_cnt):\n if i != 0:\n res += \" & \"\n if symb_transl_dict[symb] != i:\n res += \"!\"\n res += str(i)\n\n res += \"] {}\\n\".format(state_transl(tgt))\n res += \"--END--\\n\"\n\n return res",
"def read_szx_fmv_11(eps_file):\n raw_data = eps_file.scaled_mdr\n raw_unscaled = eps_file.mdr\n mphr = eps_file.mphr\n\n n_node_per_line = raw_data[\"LONGITUDE\"].shape[1]\n n_lines = raw_data[\"LONGITUDE\"].shape[0]\n n_records = raw_data[\"LONGITUDE\"].size\n\n data = {}\n metadata = {}\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n ascat_time = shortcdstime2jd(raw_data[\"UTC_LINE_NODES\"].flatten()[\"day\"],\n raw_data[\"UTC_LINE_NODES\"].flatten()[\"time\"])\n data[\"jd\"] = ascat_time[idx_nodes]\n\n metadata[\"spacecraft_id\"] = np.int8(mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(mphr[\"ORBIT_START\"])\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n\n for f in fields:\n metadata[f] = np.int16(mphr[f.upper()])\n\n fields = [\"sat_track_azi\"]\n for f in fields:\n data[f] = raw_data[f.upper()].flatten()[idx_nodes]\n\n fields = [(\"longitude\", long_nan), (\"latitude\", long_nan),\n (\"swath_indicator\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].flatten()\n valid = raw_unscaled[f.upper()].flatten() != nan_val\n data[f][~valid] = nan_val\n\n fields = [(\"sigma0_trip\", long_nan), (\"inc_angle_trip\", uint_nan),\n (\"azi_angle_trip\", int_nan), (\"kp\", uint_nan),\n (\"f_kp\", byte_nan), (\"f_usable\", byte_nan), (\"f_f\", uint_nan),\n (\"f_v\", uint_nan), (\"f_oa\", uint_nan), (\"f_sa\", uint_nan),\n (\"f_tel\", uint_nan), (\"f_land\", uint_nan)]\n\n for f, nan_val in fields:\n data[f] = raw_data[f.upper()].reshape(n_records, 3)\n valid = raw_unscaled[f.upper()].reshape(n_records, 3) != nan_val\n data[f][~valid] = nan_val\n\n # modify longitudes from (0, 360) to (-180,180)\n mask = np.logical_and(data[\"longitude\"] != long_nan,\n data[\"longitude\"] > 180)\n data[\"longitude\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n mask = (data[\"azi_angle_trip\"] != int_nan) & (data[\"azi_angle_trip\"] < 0)\n data[\"azi_angle_trip\"][mask] += 360\n\n data[\"node_num\"] = np.tile((np.arange(n_node_per_line) + 1),\n n_lines).astype(np.uint8)\n data[\"line_num\"] = idx_nodes.astype(np.uint16)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n return data, metadata",
"def oddr_tofrom_oddr(newsystem, coord):\n return newsystem.coord(x=coord.x, y=coord.y)",
"def burn_to_apo(mission):\n vessel = mission.conn.space_center.active_vessel\n ap = vessel.auto_pilot\n\n apoapsis = vessel.orbit.apoapsis_altitude\n half_period = vessel.orbit.period / 2\n apo_time = vessel.orbit.time_to_apoapsis\n target_altitude = mission.parameters.get('target_altitude', 100000)\n target_apt = mission.parameters.get('target_apt', 40)\n max_autostage = mission.parameters.get('max_autostage', 0)\n min_pitch = mission.parameters.get('min_pitch_pid', -15)\n max_pitch = mission.parameters.get('max_pitch_pid', 15)\n\n if mission.current_step[\"first_call\"]:\n mission.parameters[\"pid\"] = PID(0.5, 0.05, 0.2, min_pitch, max_pitch)\n vessel.control.throttle = 1\n\n if apoapsis > target_altitude:\n del mission.parameters[\"pid\"]\n vessel.control.throttle = 0\n mission.next('coast_to_space')\n return\n\n auto_stage(vessel, max_autostage)\n\n if half_period < apo_time:\n target_pitch = max_pitch\n else:\n target_pitch = mission.parameters[\"pid\"].seek(target_apt, apo_time, mission.ut())\n\n ap.engage()\n ap.target_pitch_and_heading(target_pitch, 90)\n mission.parameters[\"target_pitch\"] = target_pitch",
"def writeAD(self):\n ofname = self.ad_file\n ofh = open(ofname,'w')\n\n for line in self.lines_ad:\n f = line.strip().split()\n if (len(f) > 1 and f[1] == 'WindFile'):\n if (self.wind_file != None):\n f[0] = \"\\\"\"+self.wind_file+\"\\\"\"\n line = unsplit(f)\n ofh.write(line)\n\n ofh.close()\n\n # now also copy relevant airfoil files, if path is relative\n tmp = self.af_dict['polar_files'][0]\n if not os.path.isabs(tmp):\n tmp = tmp.split(\"\\\\\")\n tmp = tmp[0].split(\"/\")[0]\n # tmp is now root of relative path to airfoils\n dst = tmp\n src = os.path.join(self.fst_dir, tmp)\n print \"copying aerodata from \", src, \"TO \", dst\n if (not os.path.isdir(dst)):\n shutil.copytree(src, dst)\n\n # copy of relevant wind file in separate function writeWnd",
"def save(aircraft, settings):\n\n filepath = settings.paths('f_aircraft')\n logger.info(f\"Writing aircraft model to file '{truncate_filepath(filepath)}'...\")\n\n # ====== Aircraft top level =====\n output = {}\n output['uid'] = aircraft.uid\n\n output['refs'] = {}\n for key, value in aircraft.refs.items():\n output['refs'][key] = value\n\n # ====== Wings =====\n output['wings'] = []\n for wing in aircraft.wings.values():\n wing_entry = {}\n wing_entry['uid'] = wing.uid\n wing_entry['symmetry'] = wing.symmetry\n\n # ====== Segments =====\n wing_entry['segments'] = []\n for segment in wing.segments.values():\n segment_entry = {}\n segment_entry['uid'] = segment.uid\n segment_entry['vertices'] = dict(segment.vertices)\n\n segment_entry['geometry'] = {}\n for key, value in segment.geometry.items():\n segment_entry['geometry'][key] = value\n\n segment_entry['airfoils'] = {}\n for key, value in segment.airfoils.items():\n # If airfoil is \"blade\" file, make sure to save as relative path\n # Note: Airfoil definition may also be for instance \"NACA1234\"\n if \"blade.\" in value:\n # Make path relative!\n value = os.path.join(PATHS.DIR.AIRFOILS, os.path.basename(value))\n segment_entry['airfoils'][key] = value\n\n segment_entry['panels'] = {}\n for key, value in segment.panels.items():\n segment_entry['panels'][key] = value\n\n wing_entry['segments'].append(segment_entry)\n\n # ====== Controls =====\n wing_entry['controls'] = []\n for control in wing.controls.values():\n control_entry = {}\n\n control_entry['uid'] = control.uid\n control_entry['device_type'] = control.device_type\n control_entry['deflection'] = control.deflection\n control_entry['deflection_mirror'] = control.deflection_mirror\n\n control_entry['segment_uid'] = {}\n for key, value in control.segment_uid.items():\n control_entry['segment_uid'][key] = value\n\n control_entry['rel_vertices'] = {}\n for key, value in control.rel_vertices.items():\n control_entry['rel_vertices'][key] = value\n\n control_entry['rel_hinge_vertices'] = {}\n for key, value in control.rel_hinge_vertices.items():\n control_entry['rel_hinge_vertices'][key] = value\n\n control_entry['panels'] = {}\n for key, value in control.panels.items():\n control_entry['panels'][key] = value\n\n wing_entry['controls'].append(control_entry)\n\n output['wings'].append(wing_entry)\n\n with open(filepath, 'w') as fp:\n dump_pretty_json(output, fp)",
"def _gto_from_ccdata(self):\n\n gbasis = self.ccdata.gbasis\n lines = []\n\n for no, basis in enumerate(gbasis):\n lines.append(f\"{no + 1:3d} 0\")\n for prims in basis:\n lines.append(f\"{prims[0].lower():s} {len(prims[1]):5d} 1.00\")\n for prim in prims[1]:\n lines.append(f\"{prim[0]:15.9e} {prim[1]:15.9e}\")\n lines.append('')\n lines.append('')\n return lines",
"def isoformat(self):\n return \"\"",
"def convert(self):\r\n \r\n try:\r\n log('Processing %s ...' % self.rapid_output_file_list[0], 'INFO')\r\n time_start_conversion = datetime.utcnow()\r\n\r\n # Validate the raw netCDF file\r\n log('validating input netCDF file', 'INFO')\r\n id_len, time_len = self._validate_raw_nc()\r\n\r\n # Initialize the output file (create dimensions and variables)\r\n log('initializing output', 'INFO')\r\n self._initialize_output(time_len, id_len)\r\n\r\n self._generate_time_values()\r\n \r\n #copy river ids over\r\n self.cf_nc.variables[self.output_id_dim_name][:] = self.raw_nc_list[0].get_river_id_array()\r\n\r\n # Populate comid, lat, lon, z\r\n log('writing comid lat lon z', 'INFO')\r\n lookup_start = datetime.now()\r\n self._write_comid_lat_lon_z()\r\n duration = str((datetime.now() - lookup_start).total_seconds())\r\n log('Lookup Duration (s): ' + duration, 'INFO')\r\n\r\n # Create a variable for streamflow. This is big, and slows down\r\n # previous steps if we do it earlier.\r\n self._copy_streamflow_values()\r\n \r\n #close files\r\n for raw_nc in self.raw_nc_list:\r\n raw_nc.close()\r\n self.cf_nc.close()\r\n \r\n #delete original RAPID output\r\n remove_files(*self.rapid_output_file_list)\r\n\r\n #rename nc compliant file to original name\r\n os.rename(self.cf_compliant_file, self.rapid_output_file_list[0])\r\n log('Time to process %s' % (datetime.utcnow()-time_start_conversion), 'INFO')\r\n except Exception:\r\n #delete cf RAPID output\r\n remove_files(self.cf_compliant_file)\r\n #log('Conversion Error %s' % e, 'ERROR')\r\n raise",
"def write_imp_ASCII(DT, lat_lon_r, BX, BY, BZ, Label,\n olat_olon_or, obsX, obsY, obsZ, obsInc, obsID,\n filename='impOut.zip'):\n\n# def write_antti(DT, Lat, Lon, BX, BY, BZ, Label,\n# obsLat, obsLon, obsInc, obsID,\n# dt_file = 'DateTime.txt.gz',\n# location_file = 'LatLon.txt.gz',\n# bx_file = 'BX.txt.gz',\n# by_file = 'BY.txt.gz',\n# bz_file = 'BZ.txt.gz',\n# station_file = 'Stations.txt.gz'):\n\n # unpack former tuple arguments (see PEP-3113)\n Lat, Lon, Rad = lat_lon_r\n obsLat, obsLon, obsRad = olat_olon_or\n\n # create a temporary directory\n tmpDir = tempfile.mkdtemp()\n\n # set filenames\n dt_file = os.path.join(tmpDir, 'DateTime.txt')\n location_file = os.path.join(tmpDir, 'LatLon.txt')\n bx_file = os.path.join(tmpDir, 'BX.txt')\n by_file = os.path.join(tmpDir, 'BY.txt')\n bz_file = os.path.join(tmpDir, 'BZ.txt')\n obx_file = os.path.join(tmpDir, 'obsBX.txt')\n oby_file = os.path.join(tmpDir, 'obsBY.txt')\n obz_file = os.path.join(tmpDir, 'obsBZ.txt')\n station_file = os.path.join(tmpDir, 'Stations.txt')\n\n # write out ASCII files\n _write_antti_datetime(DT, dt_file)\n _write_antti_location(Lat, Lon, Rad, Label, location_file)\n _write_antti_component(BX, 'X (northward) component', bx_file)\n _write_antti_component(BY, 'Y (eastward) component', by_file)\n _write_antti_component(BZ, 'Z (downward) component', bz_file)\n _write_antti_stations(obsLat, obsLon, obsRad, obsInc, obsID, station_file)\n\n # not a part of original ASCII format, but included for completeness\n _write_antti_component(obsX, 'observed X (northward) component', obx_file)\n _write_antti_component(obsY, 'observed Y (eastward) component', oby_file)\n _write_antti_component(obsZ, 'observed Z (downward) component', obz_file)\n\n # open up output zip file\n with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as outZip:\n outZip.write(dt_file, os.path.basename(dt_file))\n outZip.write(location_file, os.path.basename(location_file))\n outZip.write(bx_file, os.path.basename(bx_file))\n outZip.write(by_file, os.path.basename(by_file))\n outZip.write(bz_file, os.path.basename(bz_file))\n outZip.write(obx_file, os.path.basename(obx_file))\n outZip.write(oby_file, os.path.basename(oby_file))\n outZip.write(obz_file, os.path.basename(obz_file))\n outZip.write(station_file, os.path.basename(station_file))\n\n shutil.rmtree(tmpDir)",
"def distribute_to_indiv_files(API_res, tempo_dname='../data/__tempo'):\n format='.csv'\n if API_res is not None:\n global aircraft_database\n try:\n aircraft_database\n except:\n aircraft_database = pd.read_csv('../data/aircraftDatabase.csv', low_memory=False)\n \n if not os.path.exists(tempo_dname):\n os.mkdir(tempo_dname)\n t = API_res.time\n for state in API_res.states:\n id_dict = { 'icao24': state.icao24.strip() }\n conditions = {'manufacturericao': 'AIRBUS', 'typecode': r'\\bA318\\b|\\bA319\\b|\\bA320\\b|\\bA321\\b'}\n if not match_conditions(id_dict, conditions, aircraft_database):\n continue\n print(f'Got one! {state.icao24}, {state.callsign}')\n fname = state.icao24.strip() + '_' + state.callsign.strip()\n if not os.path.exists(tempo_dname + '/' + fname + format):\n f = open(tempo_dname + '/' + fname + format, 'a+')\n f.write('time,lat,lon,velocity,heading,vertrate,onground,spi,squawk,baroaltitude,geoaltitude,lastposupdate,lastcontact \\n')\n else:\n f = open(tempo_dname + '/' + fname + format, 'a+')\n f.write(','.join(map(str, [t, state.latitude, state.longitude, state.velocity, state.heading, state.vertical_rate, state.on_ground, \n state.spi, state.squawk, state.baro_altitude, state.geo_altitude, state.time_position, state.last_contact])) + '\\n')\n f.close()",
"def ase2xyz(atoms):\n atoms.write('temp.xyz', append=False)\n xyz = \"\"\n with open('temp.xyz', 'r') as f:\n for line in f:\n xyz += line\n return xyz",
"def oath(ctx):\n\n dev = ctx.obj[\"device\"]\n conn = dev.open_connection(SmartCardConnection)\n ctx.call_on_close(conn.close)\n ctx.obj[\"session\"] = OathSession(conn)\n ctx.obj[\"oath_keys\"] = AppData(\"oath_keys\")",
"def initialise_indoor_air_quality(self):\n # name, command, signals, delay\n self._i2c_read_words_from_cmd(command=[0x20, 0x03], reply_size=0, delay=0.01)",
"def isoformat(self, *args, **kwargs): # real signature unknown\r\n pass",
"def wline():\n # this is the alpha\n inlist = list(\"begin\") # change data into a list element\n outlist[0:5] = inlist # place data in the list in the correct place\n # print(\"\".join(outlist)) # see result\n # this is the omega\n inlist = list(\"end\") # change data into a list element\n outlist[1247:1250] = inlist # place data in the list in the correct place\n # ok, pack em up...\n outstr = \"\".join(outlist)\n print(outstr)\n # print(\"Length is \"+lswtchro()en(outstr))\n print(len(outstr))\n # of = open(\"workfile\", \"w\")\n of.write(outstr)",
"def extractOldALFOSCHeader(file):\n\n try:\n\n hdulist = pyfits.open(file)\n hdulist.close() \n\n # Extract primary header unit\n ph = extractHDU(file,0)\n\n # Form a proper timestamp from a float type UT \n ut = requireValidFloat('UT',ph) \n hh = int(ut)\n mm = int((ut-hh)*60)\n ss = int((((ut-hh)*60)-mm)*60)\n timestamp = \"%02d:%02d:%02d\" % (hh,mm,ss)\n date_obs = requireValidString('DATE-OBS', ph)\n\n fitsheader = {\n 'imagetyp': ph.get('IMAGETYP', 'na').strip() or 'na',\n 'exptime'\t: requireValidFloat('EXPTIME',ph),\t\t\t\n 'azimuth'\t: '0.00', \t\n 'austatus': 'na',\t\n 'telfocus': requireValidInt('TELFOCUS', ph),\n 'gain'\t: '0.726',\n 'alfltid'\t: requireValidInt('FILTID', ph),\n 'alfltnm'\t: requireValidString('FILTER', ph),\t \t\n 'fafltid'\t: requireValidInt('AFILTID', ph),\n 'fafltnm'\t: requireValidString('AFILTER', ph),\n 'fbfltid'\t: requireValidInt('BFILTID', ph),\n 'fbfltnm'\t: requireValidString('BFILTER', ph),\t\t\n 'rotpos' : requireValidFloat('ROTPOS',ph),\n 'apertur' : requireValidString('APERTUR', ph),\n 'ra' : '%.2f' % requireValidFloat('RA',ph),\n 'decl' : '%.2f' % requireValidFloat('DEC',ph) \n\t\n }\n \n fitsheader['dateobs'] = \"%sT%s\" % (date_obs, timestamp)\n\n # Calculate telescope altitude from airmass\n airmass = requireValidFloat('AIRMASS',ph)\n fitsheader['telalt'] = '%.2f' % (90 - degrees(pi/2 - asin(1/airmass))) \n\n # Calculate pixel scale\n cd1_1 = requireValidInt('CDELT1', ph)\n fitsheader['pscale'] = str(cd1_1 * 0.19)\n\n fitsheader['instrume'] = 'alfosc'\n\n if (fitsheader['exptime'] > 1.0) and (requireValidString('GRISM', ph) == 'Open_(Lyot)'):\n fitsheader['imaging'] = 1\n else:\n fitsheader['imaging'] = 0\t\n\n fitsheader['keys'] = ['dateobs','telalt','azimuth','rotpos','ra','decl','telfocus','pscale','gain',\n\t\t'apertur','alfltid','alfltnm','fafltid','fafltnm','fbfltid','fbfltnm',\n\t\t'imagetyp','exptime','austatus']\n\n except HeaderException, e:\n return ['ERROR']\n\n\n return fitsheader",
"def __bytes__(self):\n line1=self.name.encode(\"ascii\").ljust(24,b\" \")\n line2=b\"1 %05dU %02d%03d%-3b %02d%012.8f %c.%08d %c%05d%+01d %c%05d%+01d 0 %04d\" %\\\n (self.id,self.desig[\"year\"]%100,self.desig[\"launch\"],\\\n self.desig[\"object\"].encode(\"ascii\"),self.epoch[\"year\"]%100,\\\n self.epoch[\"day\"],b\"-\" if self.fdmm<0 else b\" \",abs(self.fdmm*1.e8),\\\n b\"-\" if self.sdmm<0 else b\" \",\\\n abs(self.sdmm*pow(10,5-(ceil(log(abs(self.sdmm),10)) if \\\n abs(self.sdmm)>0 else 0))),\\\n (ceil(log(abs(self.sdmm),10)) if abs(self.sdmm)>0 else 0),\\\n b\"-\" if self.bstar<0 else b\" \",\\\n abs(self.bstar*pow(10,5-(ceil(log(abs(self.bstar),10)) if \\\n abs(self.bstar)>0 else 0))),\\\n (ceil(log(abs(self.bstar),10)) if abs(self.bstar)>0 else 0),\\\n self.nr,)\n line3=b\"2 %05d %08.4f %08.4f %07d %08.4f %08.4f %011.8f%05d\" %\\\n (self.id,self.inc,self.raan,self.ecc*1.e7,self.aop,\\\n self.ma,self.mm,self.revol,)\n l2cs=0\n for c in line2:\n bc=bytes([c])\n if bc.isdigit():\n l2cs+=int(bc.decode(\"ascii\"))\n elif bc==b\"-\":\n l2cs+=1\n l2cs%=10\n\n l3cs=0\n for c in line3:\n bc=bytes([c])\n if bc.isdigit():\n l3cs+=int(bc.decode(\"ascii\"))\n elif bc==b\"-\":\n l3cs+=1\n l3cs%=10\n return line1+b\"\\r\\n\"+line2+str(l2cs).encode(\"ascii\")+b\"\\r\\n\"+line3+\\\n str(l3cs).encode(\"ascii\")+b\"\\r\\n\"",
"def to_isoformat(self) -> str:\n return self.isoformat()",
"def createTOFin(En):\n ftemplate = open(\"TOFtemplate.in\", \"r\")\n lines = ftemplate.readlines()\n ftofin = open(\"TOF.in\", \"w\") \n energyline = lines[12].split()\n lines[12] = \"%s %g %s\\n\"%(energyline[0], En, energyline[2])\n ftofin.writelines(lines)\n ftemplate.close()\n ftofin.close()",
"def eta2abc(parameter):\r\n # PV2AC conversion pathway TODO\r\n if parameter['Top'] == 'DC' or parameter['Top'] == 'PVINV' or parameter['Top'] == 'PV' and parameter['P_PV2AC_out'] is not None or parameter['Top'] == 'AC' and parameter['P_PV2AC_out'] is not None:\r\n \r\n # Create variables for the sampling points and corresponding efficiencies TODO\r\n p_pv2ac = np.fromiter((value for key, value in parameter.items() if 'p_PV2AC_' in key and value is not None), float)\r\n eta_pv2ac = np.fromiter((value / 100 for key, value in parameter.items() if 'eta_PV2AC_' in key and value is not None), float)\r\n\r\n # Absolute input and output power in W\r\n p_pv2ac_out = parameter['P_PV2AC_out'] * p_pv2ac * 1000\r\n p_pv2ac_in = p_pv2ac_out / eta_pv2ac\r\n\r\n # Absolute power loss in W\r\n P_l_pv2ac_in = (1 - eta_pv2ac) * p_pv2ac_in\r\n P_l_pv2ac_out = (1 / eta_pv2ac - 1) * p_pv2ac_out\r\n\r\n # Polynomial curve fitting parameters of the power loss functions in W\r\n \r\n # Based on input power\r\n p = np.polyfit(p_pv2ac_in / parameter['P_PV2AC_in'] / 1000, P_l_pv2ac_in, 2)\r\n parameter['PV2AC_a_in'] = p[0]\r\n parameter['PV2AC_b_in'] = p[1]\r\n parameter['PV2AC_c_in'] = p[2]\r\n\r\n # Based on output power\r\n p = np.polyfit(p_pv2ac, P_l_pv2ac_out, 2)\r\n parameter['PV2AC_a_out'] = p[0]\r\n parameter['PV2AC_b_out'] = p[1]\r\n parameter['PV2AC_c_out'] = p[2]\r\n \r\n # PV2BAT conversion pathway\r\n if parameter['Top'] == 'DC' or parameter['Top'] == 'PV':\r\n\r\n # Create variables for the sampling points and corresponding efficiencies\r\n p_pv2bat = np.array([value for key, value in parameter.items() if 'p_PV2BAT_' in key])\r\n eta_pv2bat = np.array([value / 100 for key, value in parameter.items() if 'eta_PV2BAT_' in key])\r\n\r\n # Create missing variables\r\n\r\n # Nominal input power of the PV2BAT conversion pathway of DC-coupled systems\r\n if parameter['P_PV2BAT_in'] is None:\r\n parameter['P_PV2BAT_in'] = parameter['P_PV2BAT_out'] / (parameter['eta_PV2BAT_100'] / 100)\r\n\r\n # Absolute input and output power in W\r\n p_pv2bat_out = parameter['P_PV2BAT_out'] * p_pv2bat * 1000\r\n p_pv2bat_in = p_pv2bat_out / eta_pv2bat\r\n\r\n # Absolute power loss in W\r\n P_l_pv2bat_in = (1 - eta_pv2bat) * p_pv2bat_in\r\n P_l_pv2bat_out = (1 / eta_pv2bat - 1) * p_pv2bat_out\r\n \r\n # Polynomial curve fitting parameters of the power loss functions in W\r\n \r\n # Based on input power\r\n p = np.polyfit(p_pv2bat_in / parameter['P_PV2BAT_in'] / 1000, P_l_pv2bat_in, 2)\r\n parameter['PV2BAT_a_in'] = p[0]\r\n parameter['PV2BAT_b_in'] = p[1]\r\n parameter['PV2BAT_c_in'] = p[2]\r\n\r\n # Based on output power\r\n p = np.polyfit(p_pv2bat, P_l_pv2bat_out, 2)\r\n parameter['PV2BAT_a_out'] = p[0]\r\n parameter['PV2BAT_b_out'] = p[1]\r\n parameter['PV2BAT_c_out'] = p[2]\r\n \r\n # AC2BAT conversion pathway\r\n if parameter['Top'] == 'AC' or parameter['Top'] == 'DC' and parameter['P_AC2BAT_in'] is not None:\r\n\r\n # Create variables for the sampling points and corresponding efficiencies TODO\r\n p_ac2bat = np.fromiter((value for key, value in parameter.items() if 'p_AC2BAT_' in key), float)\r\n eta_ac2bat = np.fromiter((value / 100 for key, value in parameter.items() if 'eta_AC2BAT_' in key), float)\r\n\r\n # Absolute input and output power in W\r\n p_ac2bat_out = parameter['P_PV2BAT_out'] * p_ac2bat * 1000\r\n p_ac2bat_in = p_ac2bat_out / eta_ac2bat\r\n\r\n # Absolute power loss in W\r\n P_l_ac2bat_in = (1 - eta_ac2bat) * p_ac2bat_in\r\n P_l_ac2bat_out = (1 / eta_ac2bat - 1) * p_ac2bat_out\r\n\r\n # Polynomial curve 
fitting parameters of the power loss functions in W\r\n \r\n # Based on input power\r\n p = np.polyfit(p_ac2bat_in / parameter['P_AC2BAT_in'] / 1000, P_l_ac2bat_in, 2)\r\n parameter['AC2BAT_a_in'] = p[0]\r\n parameter['AC2BAT_b_in'] = p[1]\r\n parameter['AC2BAT_c_in'] = p[2]\r\n\r\n # Based on output power\r\n p = np.polyfit(p_ac2bat, P_l_ac2bat_out, 2)\r\n parameter['AC2BAT_a_out'] = p[0]\r\n parameter['AC2BAT_b_out'] = p[1]\r\n parameter['AC2BAT_c_out'] = p[2]\r\n \r\n # BAT2AC conversion pathway\r\n if parameter['Top'] =='AC' or parameter['Top'] =='DC' or parameter['Top'] =='PV' and parameter['P_BAT2AC_out'] is not None:\r\n\r\n # Create variables for the sampling points and corresponding efficiencies TODO\r\n p_bat2ac = np.fromiter((value for key, value in parameter.items() if 'p_BAT2AC_' in key), float)\r\n eta_bat2ac = np.fromiter((value / 100 for key, value in parameter.items() if 'eta_BAT2AC_' in key), float)\r\n\r\n # Absolute input and output power in W\r\n p_bat2ac_out = parameter['P_BAT2AC_out'] * p_bat2ac * 1000\r\n p_bat2ac_in = p_bat2ac_out / eta_bat2ac\r\n\r\n # Absolute power loss in W\r\n P_l_bat2ac_in = (1 - eta_bat2ac) * p_bat2ac_in\r\n P_l_bat2ac_out = (1 / eta_bat2ac - 1) * p_bat2ac_out\r\n\r\n # Polynomial curve fitting parameters of the power loss functions in W\r\n \r\n # Based on input power\r\n p = np.polyfit(p_bat2ac_in / parameter['P_BAT2AC_in'] / 1000, P_l_bat2ac_in, 2)\r\n parameter['BAT2AC_a_in'] = p[0]\r\n parameter['BAT2AC_b_in'] = p[1]\r\n parameter['BAT2AC_c_in'] = p[2]\r\n\r\n # Based on output power\r\n p = np.polyfit(p_bat2ac, P_l_bat2ac_out, 2)\r\n parameter['BAT2AC_a_out'] = p[0]\r\n parameter['BAT2AC_b_out'] = p[1]\r\n parameter['BAT2AC_c_out'] = p[2]\r\n \r\n # BAT2PV conversion pathway\r\n if parameter['Top'] =='PV':\r\n\r\n # Create variables for the sampling points and corresponding efficiencies TODO\r\n p_bat2pv = np.fromiter((value for key, value in parameter.items() if 'p_BAT2PV_' in key), float)\r\n eta_bat2pv = np.fromiter((value / 100 for key, value in parameter.items() if 'eta_BAT2PV_' in key), float)\r\n\r\n # Absolute input and output power in W\r\n p_bat2pv_out = parameter['P_BAT2PV_out'] * p_bat2pv * 1000\r\n p_bat2pv_in = p_bat2pv_out / eta_bat2pv\r\n\r\n # Absolute power loss in W\r\n P_l_bat2pv_in = (1 - eta_bat2pv) * p_bat2pv_in\r\n P_l_bat2pv_out = (1 / eta_bat2pv - 1) * p_bat2pv_out\r\n\r\n # Polynomial curve fitting parameters of the power loss functions in W\r\n \r\n # Based on input power TODO\r\n p = np.polyfit(p_bat2pv_in / parameter['P_BAT2AC_in'] / 1000, P_l_bat2pv_in, 2)\r\n parameter['BAT2PV_a_in'] = p[0]\r\n parameter['BAT2PV_b_in'] = p[1]\r\n parameter['BAT2PV_c_in'] = p[2]\r\n\r\n # Based on output power\r\n p = np.polyfit(p_bat2pv, P_l_bat2pv_out, 2)\r\n parameter['BAT2PV_a_out'] = p[0]\r\n parameter['BAT2PV_b_out'] = p[1]\r\n parameter['BAT2PV_c_out'] = p[2]\r\n \r\n # Additional parameters\r\n\r\n # Mean battery capacity in kWh\r\n try:\r\n parameter['E_BAT'] = (parameter['E_BAT_usable'] / parameter['eta_BAT'] * 100 + parameter['E_BAT_usable']) / 2\r\n except:\r\n parameter['E_BAT'] = None\r\n\r\n # Mean stationary deviation of the charging power in W\r\n try:\r\n parameter['P_PV2BAT_DEV'] = parameter['P_PV2BAT_DEV_IMPORT'] - parameter['P_PV2BAT_DEV_EXPORT']\r\n except:\r\n parameter['P_PV2BAT_DEV'] = None\r\n\r\n if parameter['Top'] == 'AC':\r\n parameter['P_AC2BAT_DEV'] = parameter['P_PV2BAT_DEV'] \r\n \r\n # Mean stationary deviation of the discharging power in W\r\n try:\r\n parameter['P_BAT2AC_DEV'] = 
parameter['P_BAT2AC_DEV_EXPORT'] - parameter['P_BAT2AC_DEV_IMPORT']\r\n except:\r\n parameter['P_BAT2AC_DEV'] = None\r\n \r\n # Time constant for the first-order time delay element in s\r\n try:\r\n parameter['t_CONSTANT'] = (parameter['t_SETTLING'] - round(parameter['t_DEAD'])) / 3\r\n except:\r\n parameter['t_CONSTANT'] = None\r\n\r\n # Hysteresis threshold for the recharging of the battery\r\n parameter['SOC_h'] = 0.98\r\n\r\n # Feed-in power limit in kW/kWp\r\n parameter['p_ac2g_max'] = 0.7\r\n\r\n return parameter",
"def run_azos(ts):\n pgconn = get_dbconn(\"iem\")\n cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)\n\n utcnow = datetime.datetime.utcnow()\n # Now we have the tricky work of finding what 7 AM is\n ts = ts.astimezone(ZoneInfo(\"America/Chicago\"))\n ts1 = ts.replace(hour=7)\n ts0 = ts1 - datetime.timedelta(hours=24)\n cursor.execute(\n \"select t.id, t.name, sum(phour), st_x(geom), st_y(geom) \"\n \"from hourly h JOIN stations t ON \"\n \"(h.iemid = t.iemid) where t.network in ('IA_ASOS', 'SD_ASOS',\"\n \"'NE_ASOS', 'KS_ASOS', 'MO_ASOS', 'IL_ASOS', 'WI_ASOS', 'MN_ASOS') \"\n \"and valid >= %s and valid < %s GROUP by t.id, t.name, t.geom\",\n (ts0, ts1),\n )\n\n res = {\n \"type\": \"FeatureCollection\",\n \"features\": [],\n \"generation_time\": utcnow.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"count\": cursor.rowcount,\n }\n for row in cursor:\n res[\"features\"].append(\n dict(\n type=\"Feature\",\n id=row[\"id\"],\n properties=dict(\n pday=p(row[\"sum\"]), snow=None, snowd=None, name=row[\"name\"]\n ),\n geometry=dict(\n type=\"Point\", coordinates=[row[\"st_x\"], row[\"st_y\"]]\n ),\n )\n )\n\n return json.dumps(res)",
"def airl():\n algorithm = \"airl\"",
"def extract_hrc_evt2(obsid):\n#\n#--- write required arc4gl command\n#\n line = 'operation=retrieve\\n'\n line = line + 'dataset=flight\\n'\n line = line + 'detector=hrc\\n'\n line = line + 'level=2\\n'\n line = line + 'filetype=evt2\\n'\n line = line + 'obsid=' + str(obsid) + '\\n'\n line = line + 'go\\n'\n f = open(zspace, 'w')\n f.write(line)\n f.close()\n\n cmd1 = \"/usr/bin/env PERL5LIB=\"\n cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i' + zspace\n cmd = cmd1 + cmd2\n\n#\n#--- run arc4gl\n#\n bash(cmd, env=ascdsenv)\n mcf.rm_file(zspace)\n#\n#--- check the data is actually extracted\n#\n cmd = 'ls *'+ str(obsid) + '*evt2.fits.gz >' + zspace\n os.system(cmd)\n f = open(zspace, 'r')\n data = [line.strip() for line in f.readlines()]\n f.close()\n mcf.rm_file(zspace)\n\n if len(data) > 0:\n os.system('gzip -d *.gz')\n file = data[0]\n file = file.replace('.gz', '')\n return file\n else:\n return 'na'",
"def merge_energy_datatypes(osm_path): \n #extract line data\n df_line = powerline_limited(osm_path) #extract required data\n if 'asset' in df_line.columns:\n df_line['asset'] = list(map(lambda x: x.lower(), df_line['asset'])) #make sure that asset column is in lowercase characters\n #reclassify assets \n mapping_dict = {\n \"cable\" : \"cable\", #underground\n \"minor_cable\" : \"cable\", \n #\"generator\" : \"generator\", #device used to convert power from one form to another\n \"line\" : \"line\", #overground\n \"minor_line\" : \"minor_line\", #overground\n #\"plant\" : \"plant\", #place where power is generated\n #\"substation\" : \"substation\"\n }\n df_line['asset'] = df_line.asset.apply(lambda x : mapping_dict[x]) #reclassification \n\n if 'voltage' in df_line.columns:\n df_line = df_line.drop(['voltage'], axis=1) \n \n #extract polygon data\n df_poly = power_polygon(osm_path) #extract required data\n df_poly['geometry'] =pygeos.buffer(df_poly.geometry,0) #avoid intersection\n \n #extract point data\n df_point = power_point(osm_path) #extract required data\n \n return pandas.concat([df_line, df_poly, df_point], ignore_index=True)",
"def make_outstr_ch( target, mjd, utc_tstart_dt, utc_tend_dt, zenith_midtime, airmass, \\\n trtype, moonpos, moondist, moonphase ):\n\n target_str = target.replace( ' ', '' ).rjust( 11 )\n mjd_str = '{0:.2f}'.format( mjd ).center( 8 )\n \n utc_tstart_str = '{0:04d}:{1:02d}:{2:02d}:{3:02d}:{4:02d}:{5:02d}'\\\n .format( utc_tstart_dt.year, \\\n utc_tstart_dt.month, \\\n utc_tstart_dt.day, \\\n utc_tstart_dt.hour, \\\n utc_tstart_dt.minute, \\\n utc_tstart_dt.second )\n utc_tstart_str = utc_tstart_str.center( 19 )\n \n utc_tend_str = '{0:04d}:{1:02d}:{2:02d}:{3:02d}:{4:02d}:{5:02d}'\\\n .format( utc_tend_dt.year, \\\n utc_tend_dt.month, \\\n utc_tend_dt.day, \\\n utc_tend_dt.hour, \\\n utc_tend_dt.minute, \\\n utc_tend_dt.second )\n utc_tend_str = utc_tend_str.center( 19 )\n\n zenith_str = '{0:d}'.format( int( np.round( zenith_midtime ) ) ).center( 6 )\n airmass_str = '{0:.2f}'.format( airmass ).center( 4 )\n trtype_str = trtype.center( 21 )\n moonpos_str = moonpos.center( 12 )\n moondist_str = moondist.center( 9 )\n moonphase_str = moonphase.center( 10 )\n outstr = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9}\\n'\\\n .format( target_str, \\\n mjd_str, \\\n utc_tstart_str, \\\n utc_tend_str, \\\n zenith_str, \\\n airmass_str, \\\n trtype_str, \\\n moonpos_str, \\\n moondist_str, \\\n moonphase_str )\n \n return outstr",
"def exo2():",
"def export_to_ascii(self):\n t3 = self.data.t3\n # get wavelength data\n wav = self.data.wavelength[\"WAVELENGTH_NAME\"].eff_wave[0]\n\n # output u1, v1, u2, v2, u3, v3, t3amp, t3phi, t3err\n t3data = [\n [\n t3[i].u1coord / wav,\n t3[i].v1coord / wav,\n t3[i].u2coord / wav,\n t3[i].v2coord / wav,\n -(t3[i].u1coord + t3[i].u2coord) / wav,\n -(t3[i].v1coord + t3[i].v2coord) / wav,\n t3[i].t3amp[0],\n t3[i].t3phi[0],\n t3[i].t3amperr[0],\n t3[i].t3phierr[0],\n ]\n for i in range(len(t3))\n ]\n\n self.t3data = np.array(t3data)\n return self.t3data",
"def powerline(osm_path): \n return (retrieve(osm_path,'lines',['power','voltage'],**{'power':[\"='cable' or \",\"='line' or \",\"='minor_line' or \",\"='minor_cable' or \",\"='plant' or \",\"='substation'\"]})).rename(columns={'power': 'asset'})",
"def convert_amber_atomtype_to_rosetta_atomtype(self):\n\n tmpfile = open(\"tmp.mol2\", 'w')\n with open(\"ligand_am1_bcc.mol2\",'r') as f:\n atoms = False\n\n for line in f:\n\n print \"ATOM\", line.find(\"@<TRIPOS>ATOM\"),line\n print \"BOND\", line.find(\"@<TRIPOS>BOND\"),line\n\n if ( len(line) > 13 and line.find(\"@<TRIPOS>ATOM\") >-1.0):\n atoms = True\n\n elif ( len(line) > 13 and line.find(\"@<TRIPOS>BOND\") >-1.0):\n atoms = False\n\n elif( atoms == True and len(line) > 75 ):\n tmp_characters = line[47]+\".\"+line[48]\n line = line[0:47]+tmp_characters+line[50:]\n\n tmpfile.write(line)\n tmpfile.close()",
"def get_air_absorption(self):\n\n key = \"\"\n\n if self.T < 15:\n key += \"10C_\"\n else:\n key = \"20C_\"\n\n if self.H < 50:\n key += \"30-50%\"\n elif 50 <= self.H and self.H < 70:\n key += \"50-70%\"\n else:\n key += \"70-90%\"\n\n return {\n \"coeffs\": air_absorption_table[key],\n \"center_freqs\": air_absorption_table[\"center_freqs\"],\n }",
"def air_to_vac(self, wave):\n #Convert to um\n wave_um = wave*.001\n ohm2 = (1./wave_um)**(2)\n\n #Calculate index at every wavelength\n nn = []\n for x in ohm2:\n n = 1+10**(-8)*(8342.13 + (2406030/float(130.-x)) + (15997/float(389-x)))\n nn.append(n)\n \n #Get new wavelength by multiplying by index of refraction\n vac_wave = nn*wave\n return vac_wave",
"def convertOFFtoELENODE( offname ):\n with open(offname, \"r\") as OFF:\n OFFLines = OFF.readlines()\n\n OFFData = []\n for line in OFFLines:\n OFFData.append(line.split())\n \n numVertices = int(OFFData[1][0])\n numFaces = int(OFFData[1][1])\n numPerFace = int(OFFData[2+numVertices+1][0])\n\n outname = offname.split(\".\")[0] #To name the output files\n\n with open( outname + \".ele\", \"w\") as ELE:\n ELE.write( \"{}\\t{}\\t0\\n\".format(numFaces, numPerFace)) #Placing the number of elements, and taking the number of vertices in an element from the first element that appears in the off\n \n for i in range(2 + numVertices, 2 + numVertices + numFaces):\n temp = []\n for j in range( 1, 1+numPerFace):\n temp.append( int(OFFData[i][j]) + 1 )\n\n template = \"{}\\t\" + \"{}\\t\"*numPerFace + \"\\n\"\n ELE.write( template.format( i-numVertices-1, *temp))\n\n with open( outname + \".node\", \"w\") as NODE:\n NODE.write( \"{}\\t2\\t0\\t0\\n\".format(numVertices)) #Placing the number of elements, and taking the number of vertices in an element from the first element that appears in the off\n \n for i in range(2, 2 + numVertices):\n\n template = \"{}\\t{}\\t{}\\n\"\n NODE.write( template.format( i-1, *OFFData[i]))\n \n return",
"def test_xyz_to_ase(self):\n atoms_1 = converter.xyz_to_ase(self.xyz1['dict'])\n self.assertIsInstance(atoms_1, Atoms)\n self.assertEqual(str(atoms_1.symbols), 'CH4')\n np.testing.assert_array_equal(atoms_1.positions, [[0., 0., 0.],\n [0.6300326, 0.6300326, 0.6300326],\n [-0.6300326, -0.6300326, 0.6300326],\n [-0.6300326, 0.6300326, -0.6300326],\n [0.6300326, -0.6300326, -0.6300326]])"
] | [
"0.6572235",
"0.5615951",
"0.5423645",
"0.53658104",
"0.51459473",
"0.51208514",
"0.50073093",
"0.5003773",
"0.49911514",
"0.49624845",
"0.48734692",
"0.48479107",
"0.4842932",
"0.48100558",
"0.47674647",
"0.4759654",
"0.47496668",
"0.4749038",
"0.4746655",
"0.4734282",
"0.47204",
"0.47190318",
"0.4710448",
"0.4704642",
"0.46851444",
"0.46822503",
"0.4676338",
"0.46681082",
"0.4667365",
"0.46631727",
"0.46497762",
"0.46491584",
"0.46253726",
"0.46100745",
"0.46021757",
"0.4598999",
"0.4596606",
"0.45808387",
"0.4559112",
"0.45538616",
"0.45524338",
"0.45305315",
"0.4527826",
"0.4516436",
"0.45016292",
"0.44943947",
"0.4489909",
"0.4489588",
"0.44870773",
"0.44750592",
"0.44716236",
"0.447032",
"0.44679213",
"0.44672918",
"0.4458615",
"0.445858",
"0.445834",
"0.4452061",
"0.44487286",
"0.44477448",
"0.4444274",
"0.44410378",
"0.4436373",
"0.4428802",
"0.4423972",
"0.4422882",
"0.44183633",
"0.44104126",
"0.4409353",
"0.44090486",
"0.44072923",
"0.44051337",
"0.44048953",
"0.44022697",
"0.44001314",
"0.44000086",
"0.43952262",
"0.43838754",
"0.43803826",
"0.4379487",
"0.43741697",
"0.43649665",
"0.43642658",
"0.43639165",
"0.43579745",
"0.43551102",
"0.43503827",
"0.43485644",
"0.43481335",
"0.43460622",
"0.4345215",
"0.43425778",
"0.43412274",
"0.43394417",
"0.43387625",
"0.43374527",
"0.4336007",
"0.43358788",
"0.4335736",
"0.43270573"
] | 0.76431525 | 0 |
uses template in order to make kml format | def make_kml_format(self,kml_template):
if self.as_type == 'A':
self.kml_lines = kml_template['good_subdivided']['placemark']
elif self.as_type == 'B':
self.kml_lines = kml_template['bad_subdivided']['placemark']
else:
print('Unknown airspace type')
# get idx of name and coordinates
idxLine = 0
while idxLine < len(self.kml_lines):
#print(self.kml_lines[idxLine])
if self.kml_lines[idxLine].startswith('\t\t\t\t<name>'): # name line of the airspace
idx_name = idxLine
if '\t\t\t\t\t\t\t<coordinates>\n' in self.kml_lines[idxLine]: # begin of coordinates block
idx_coordinates = idxLine+1
idxLine += 1
# transform coordinates
# append all coordinates; the expected format is:
# source: 'DP 50:26:22 N 012:17:59 E\n'
# target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0
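# illustrative conversion (values not taken from the source data):
# 50:26:22 N -> 50 + 26/60 + 22/3600 = 50.439444...
# 012:17:59 E -> 12 + 17/60 + 59/3600 = 12.299722...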
coo_list = [] # collect list of coordinates as strings
for line in self.txt_lines:
if line.startswith('AN'):
self.name = line[3:].replace('\n','')
self.kml_lines[idx_name] = '\t\t\t\t<name>%s</name>\n' % self.name
if line.startswith('DP'):
# lon
lon_deg = float(line[14:17])
lon_min = float(line[18:20])
lon_sec = float(line[21:23])
lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg
if line[24] == 'W':
lon_dec *= -1 # negative if west
# lat
lat_deg = float(line[3:5])
lat_min = float(line[6:8])
lat_sec = float(line[9:11])
lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg
if line[12] == 'S':
lat_dec *= -1 # negative if south
# attach coordinates
coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))
# store for later plotting
self.lat_dec.append(lat_dec)
self.lon_dec.append(lon_dec)
# make sure that shape is closed --> first and last point must be the same
if coo_list[0] != coo_list[-1]:
coo_list.append(coo_list[0])
self.lat_dec.append(self.lat_dec[0])
self.lon_dec.append(self.lon_dec[0])
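# illustrative: a triangle read as points A, B, C becomes [A, B, C, A],
# so the KML LinearRing is explicitly closed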
# write coordinate strings into kml
self.kml_lines[idx_coordinates] = '\t\t\t\t\t\t\t\t' # tab prefix; coordinate strings are appended below
for pt in coo_list:
self.kml_lines[idx_coordinates] += pt
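# resulting line (illustrative values): '\t\t\t\t\t\t\t\t12.2997...,50.4394...,0 12.3010...,50.4401...,0 '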
print('Converted airspace %s' % self.name) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_document_kml(self, title, content):\n return \"\"\"\\\n<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://earth.google.com/kml/2.1\">\n <Document>\n <name>%s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\n%s\n </Document>\n</kml>\"\"\" % (title.replace('\\\\','/'), content)",
"def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):\n if not children:\n children = []\n\n args['tx'], args['ty'], args['tz'] = tx, ty, tz\n args['tileformat'] = tileext\n if 'tilesize' not in args:\n args['tilesize'] = tilesize\n\n if 'minlodpixels' not in args:\n args['minlodpixels'] = int(args['tilesize'] / 2)\n if 'maxlodpixels' not in args:\n args['maxlodpixels'] = int(args['tilesize'] * 8)\n if children == []:\n args['maxlodpixels'] = -1\n\n if tx is None:\n tilekml = False\n args['title'] = options.title\n else:\n tilekml = True\n args['title'] = \"%d/%d/%d.kml\" % (tz, tx, ty)\n args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)\n\n if tx == 0:\n args['drawOrder'] = 2 * tz + 1\n elif tx is not None:\n args['drawOrder'] = 2 * tz\n else:\n args['drawOrder'] = 0\n\n url = options.url\n if not url:\n if tilekml:\n url = \"../../\"\n else:\n url = \"\"\n\n s = \"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://www.opengis.net/kml/2.2\">\n <Document>\n <name>%(title)s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\"\"\" % args\n if tilekml:\n s += \"\"\"\n <Region>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n </Region>\n <GroundOverlay>\n <drawOrder>%(drawOrder)d</drawOrder>\n <Icon>\n <href>%(ty)d.%(tileformat)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\n\"\"\" % args\n\n for cx, cy, cz in children:\n csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)\n s += \"\"\"\n <NetworkLink>\n <name>%d/%d/%d.%s</name>\n <Region>\n <LatLonAltBox>\n <north>%.14f</north>\n <south>%.14f</south>\n <east>%.14f</east>\n <west>%.14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n </Region>\n <Link>\n <href>%s%d/%d/%d.kml</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n <viewFormat/>\n </Link>\n </NetworkLink>\n \"\"\" % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,\n args['minlodpixels'], url, cz, cx, cy)\n\n s += \"\"\" </Document>\n</kml>\n \"\"\"\n return s",
"def graphs_kelly():\n return render_template(\"graphs-Kelly.html\")",
"def generate_leaf_kml(self, d, content=\"\"):\n return (\"\"\"\\\n <Folder>\n <Region>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n </Region>\n <GroundOverlay>\n <drawOrder>%(draw_order)d</drawOrder>\n <Icon>\n <href>%(image_url)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\"\"\" % d\n + \"\"\"\\\n%s\n </Folder>\"\"\" % content)",
"def write_kml(self,varnames):\n if type(varnames) is str:\n varnames=(varnames,)\n content=[]\n for varname in varnames:\n content.append(self.image2kml(varname))\n kml=self.__class__.kmlstr % \\\n {'content':'\\n'.join(content),\\\n 'prog':self.__class__.progname}\n f=open(self.__class__.kmlname,'w')\n f.write(kml)\n f.close()",
"def __str__(self):\n buf = []\n for var, val in self._kml.items():\n if val is not None: # Exclude all variables that are None\n if var.endswith(\"_\"):\n buf.append(\"{0}\".format(val)) # Use the variable's __str__ as is\n else:\n if var in ['name', 'description', 'text'] and Kmlable._parse: # Parse value for HTML and convert\n val = Kmlable._chrconvert(val)\n elif (var == 'href' and os.path.exists(val) and Kmlable._kmz == True)\\\n or (var == 'targetHref' and os.path.exists(val) and Kmlable._kmz == True): # Check for images\n Kmlable._addimage(val)\n val = os.path.join('files', os.path.split(val)[1])\n buf.append(u(\"<{0}>{1}</{0}>\").format(var, val)) # Enclose the variable's __str__ with its name\n # Add namespaces\n if var.startswith(\"atom:\") and 'xmlns:atom=\"http://www.w3.org/2005/Atom\"' not in Kmlable._namespaces:\n Kmlable._namespaces.append('xmlns:atom=\"http://www.w3.org/2005/Atom\"')\n elif var.startswith(\"xal:\") and 'xmlns:xal=\"urn:oasis:names:tc:ciq:xsdschema:xAL:2.0\"' not in Kmlable._namespaces:\n Kmlable._namespaces.append('xmlns:xal=\"urn:oasis:names:tc:ciq:xsdschema:xAL:2.0\"')\n return \"\".join(buf)",
"def gen_wtml(base_dir, depth, **kwargs):\n kwargs.setdefault('FolderName', 'Toasty')\n kwargs.setdefault('BandPass', 'Visible')\n kwargs.setdefault('Name', 'Toasty map')\n kwargs.setdefault('Credits', 'Toasty')\n kwargs.setdefault('CreditsUrl', 'http://github.com/ChrisBeaumont/toasty')\n kwargs.setdefault('ThumbnailUrl', '')\n kwargs['url'] = base_dir\n kwargs['depth'] = depth\n\n template = ('<Folder Name=\"{FolderName}\">\\n'\n '<ImageSet Generic=\"False\" DataSetType=\"Sky\" '\n 'BandPass=\"{BandPass}\" Name=\"{Name}\" '\n 'Url=\"{url}/{{1}}/{{3}}/{{3}}_{{2}}.png\" BaseTileLevel=\"0\" '\n 'TileLevels=\"{depth}\" BaseDegreesPerTile=\"180\" '\n 'FileType=\".png\" BottomsUp=\"False\" Projection=\"Toast\" '\n 'QuadTreeMap=\"\" CenterX=\"0\" CenterY=\"0\" OffsetX=\"0\" '\n 'OffsetY=\"0\" Rotation=\"0\" Sparse=\"False\" '\n 'ElevationModel=\"False\">\\n'\n '<Credits> {Credits} </Credits>\\n'\n '<CreditsUrl>{CreditsUrl}</CreditsUrl>\\n'\n '<ThumbnailUrl>{ThumbnailUrl}</ThumbnailUrl>\\n'\n '<Description/>\\n</ImageSet>\\n</Folder>')\n return template.format(**kwargs)",
"def get_data_mrk():\n return render_template(\"l_markers.html\")",
"def generate_garmin_kml(self, d ):\n return (\"\"\"\n <GroundOverlay>\n <Icon>\n <href>%(image_url)s</href>\n <DrawOrder>%(draw_order)d</DrawOrder>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\"\"\" % d )",
"def test_starting_template(checker):\n contents = labeled.contents(label=\"template\")\n _ = tomllib.loads(contents)",
"def generate(self, namespace: Optional[str], template: str, func: Callable, call_args: Dict) -> str:",
"def get_kml_dict(self, tx, ty_tms, tz, image_format, draworder = 0):\n d = {}\n\n d[\"south\"], d[\"west\"], d[\"north\"], d[\"east\"] = self.tileswne(tx, ty_tms, tz)\n\n image_filename = get_tile_filename(tx, ty_tms, tz, format_extension[image_format],False)\n d[\"image_filename\"] = image_filename\n d[\"image_filename\"] = d[\"image_filename\"].replace(\"\\\\\",\"/\")\n\n if self.options.url is None:\n d[\"image_url\"] = \"../../%s\" % image_filename\n else:\n d[\"image_url\"] = \"%s%s\" % (self.options.url, image_filename)\n d[\"image_url\"] = d[\"image_url\"].replace(\"\\\\\",\"/\")\n\n url = self.options.url\n if url is None:\n # Top level KML is linked from `doc.kml' and it needs different path.\n if tz == self.tminz:\n url = \"\"\n else:\n url = \"../../\"\n\n if self.options.kmz:\n extension = \"kmz\"\n else:\n extension = \"kml\"\n\n d[\"link_url\"] = \"%s%s\" % (url, get_tile_filename(tx, ty_tms, tz, extension,False))\n d[\"link_url\"] = d[\"link_url\"].replace(\"\\\\\",\"/\")\n\n d[\"minlodpixels\"] = int(self.tilesize / 2)\n d[\"maxlodpixels\"] = -1 # int(self.tilesize * 8)\n\n if tx == 0:\n d[\"draw_order\"] = draworder + 2 * tz + 1\n else:\n d[\"draw_order\"] = draworder + 2 * tz\n\n return d",
"def machinelearn2():\n return render_template('frontml.html')",
"def create_key(template, outtype=('nii.gz',), annotation_classes=None):\n\n if template is None or not template:\n raise ValueError('Template must be a valid format string')\n return template, outtype, annotation_classes",
"def create_key(template, outtype=('nii.gz',), annotation_classes=None):\n\n if template is None or not template:\n raise ValueError('Template must be a valid format string')\n return template, outtype, annotation_classes",
"def GenerateXML(dictionary, fileName=\"labelling.xml\") : \n root = gfg.Element(\"annotation\") \n #the big section is called Annotation\n for key in dictionary:\n #for every polygon list in inside object witho subelement name and attributes and the type \"polygon\"\n objectElement = gfg.Element(\"object\") \n root.append(objectElement) \n subElement1 = gfg.SubElement(objectElement, \"name:\".strip(\":\"))\n subElement1.text = str(dictionary[key][\"name\"])\n subElement2 = gfg.SubElement(objectElement, \"attributes\".strip(\":\"))\n subElement2.text = str(dictionary[key][\"attributes\"])\n subElement3 = gfg.SubElement(objectElement, \"polygon\")\n \n for i in range(0, len(dictionary[key])-2):\n #for every vertex of the polygon list it's rounded x, y on xml\n SubInsidePolygon = gfg.SubElement(subElement3, \"pt\")\n sub_x = gfg.SubElement(SubInsidePolygon, \"x\")\n sub_y = gfg.SubElement(SubInsidePolygon, \"y\")\n sub_x.text = str(int(round(dictionary[key][\"x_y_\" + str(i)][0])))\n sub_y.text = str(int(round(dictionary[key][\"x_y_\" + str(i)][1])))\n tree = gfg.ElementTree(root) \n #create the xml tree\n with open (fileName, \"wb\") as files : \n tree.write(files) \n #if xml does not exist create one otherwise rewrite to it",
"def template(c, release=\"url-shortener\"):\n c.run(f\"helm template {release} {HELM_CHART_DIR} > ./generated-deployment.yml\")",
"def generate_lookat_kml_block(self, lng, lat, viewrange):\n return \"\"\"\n <LookAt>\n <longitude>%.14f</longitude>\n <latitude>%.14f</latitude>\n <altitude>0</altitude>\n <range>%.f</range>\n <tilt>0</tilt>\n <heading>0</heading>\n </LookAt>\n\"\"\" % (lng, lat, viewrange)",
"def __fill_template__(self,template_file,output_fname):\n dictionary = {}\n for k,v in self.__dict__.iteritems():\n if k == 'sample_key':\n try:\n int(v)\n new_sample_key = \"Sample_\" + str(v)\n dictionary.update({k:new_sample_key})\n continue\n except ValueError:\n pass\n dictionary.update({k:str(v)})\n dictionary.update({'restats_tail': self.restats_file + '.tail'})\n with open(output_fname,'w') as f:\n string = fill_template(template_file,dictionary)\n f.write(string)",
"def image2kml(self,varname,filename=None):\n\n vdata=self.get_array(varname)\n im=self.get_image(vdata)\n if filename is None:\n filename='%s.png' % varname\n f=open(filename,'w')\n f.write(im)\n f.close()\n d=self.get_kml_dict(varname,filename)\n pylab.close('all')\n return self.__class__.kmlimage % d",
"def template1(self, width, height):\n style = '\\n'.join(self.style_lines())\n defs = '\\n'.join(self.defs_lines())\n body = '\\n'.join(self.body_lines())\n defs_block = '' if not (style or defs) else '''<defs>\n <style type=\"text/css\"><![CDATA[\n%s\\n ]]></style>\\n%s\\n</defs>''' % (style, defs)\n txt = '''<?xml version=\"1.0\" standalone=\"no\"?>\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n<svg width=\"%s\" height=\"%s\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\">\n%s\\n%s\\n</svg>\\n''' % (width, height, defs_block, body)\n return txt",
"def test_create_namespaced_template(self):\n pass",
"def Template(Fenetre_largeur,Fenetre_hauteur):\r\n li= Select_ligne(\"Nombre de lignes: \",Fenetre_largeur,Fenetre_hauteur)\r\n nom=\"Template\"\r\n fich=\"Template\"\r\n version=0\r\n while Path(\"stages/\"+fich+\".txt\").is_file() == True:\r\n version+=1\r\n fich=nom+str(version)\r\n fichier=open(\"stages/\"+fich+\".txt\",'w')\r\n fichier.write(str(li))\r\n fichier.write(\"\\n\")\r\n fichier.write(\"\\n\")\r\n for i in range(li):\r\n for j in range(10):\r\n fichier.write(\"0,0|\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"gauche: resistance, droite: bonus\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"resistance max: 3\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"6=barre+\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"7=score+\")\r\n fichier.close()",
"def saved_template(self, template_id):\n\n # From user params get the wanted type and size\n category, size = template_id.split('_')\n\n # Parse the xml file\n template_tree = Etree.parse(\"patron.xml\")\n root = template_tree.getroot()\n\n # Find The selected template\n for template in root.findall(\"./type[@name='%s']/template[@size='%s']\" % (category, size)):\n # Find useful data\n info = 'T-shirt_template_%s_%s' % (category, size)\n transform = template.find('transform')\n\n # Creation of a main group for the Template\n template_attribs = {\n inkex.addNS('label', 'inkscape'): info,\n 'transform': transform.text if transform is not None else ''\n }\n template_group = inkex.etree.SubElement(self.current_layer, 'g', template_attribs)\n\n # For each pieces of the template\n for piece in template.findall('piece'):\n # Find useful data\n pieceinfo = info + \"_\" + piece.find('name').text\n transform = piece.find('transform')\n\n # Create a group for the piece\n piece_attribs = {\n inkex.addNS('label', 'inkscape'): pieceinfo,\n 'transform': transform.text if transform is not None else ''\n }\n piece_group = inkex.etree.SubElement(template_group, 'g', piece_attribs)\n\n # Add a text to display the piece info\n add_text(piece_group, pieceinfo.replace('_', ' '), piece.find('info').text, 15)\n\n # For each paths of the piece\n for part in piece.findall('part'):\n # Find useful data\n label = part.find('name').text\n partinfo = pieceinfo + \"_\" + label\n transform = part.find('transform')\n\n # Create a group for the shape\n part_attribs = {\n inkex.addNS('label', 'inkscape'): partinfo,\n 'transform': transform.text if transform is not None else ''\n }\n part_group = inkex.etree.SubElement(piece_group, 'g', part_attribs)\n\n # Add the path to the group\n style = self.normal_line if self.options.style == 'print' or label != 'offset' else self.cut_line\n path_attribs = {\n inkex.addNS('label', 'inkscape'): partinfo,\n 'style': simplestyle.formatStyle(style),\n 'd': part.find('path').text\n }\n inkex.etree.SubElement(part_group, inkex.addNS('path', 'svg'), path_attribs)",
"def create_custom_template(morph_links):\n template_text = \"<?xml version='1.0' encoding='UTF-8'?>\\n\"\n template_text += \"<templates>\\n\"\n\n template_text += \"<template name='AEtransform'>\\n\"\n for link in morph_links:\n morph_label = morph_links[link][\"Label\"]\n morph_label_ns = morph_label.replace(\" \", \"\")\n template_text += \"<attribute name='\" + morph_label_ns + \"' type='maya.double'>\\n\"\n template_text += \"<label>\" + morph_label + \"</label>\\n\"\n template_text += \"</attribute>\\n\"\n template_text += \"</template>\\n\"\n\n template_text += \"<view name='Morphs' template='AEtransform'>\\n\"\n for link in morph_links:\n groups = morph_links[link][\"Path\"].split('/')\n groups = list(filter(None, groups))\n for group in groups:\n group = group.replace(\" \", \"\")\n #template_text += \"<group name='\" + group + \"'>\\n\"\n morph_label = morph_links[link][\"Label\"]\n morph_label_ns = morph_label.replace(\" \", \"\")\n template_text += \"<property name='\" + morph_label_ns + \"'/>\\n\"\n for group in groups:\n template_text += \"\" #\"</group>\\n\"\n template_text += \"</view>\\n\"\n\n template_text += \"</templates>\\n\"\n\n template_path = os.path.abspath(\"..\\scripts\\\\AETemplates\\AEtransform.MorphsTemplate.xml\")\n template_file = open(template_path, \"w\")\n template_file.write(template_text)\n template_file.close()\n\n cmds.refreshEditorTemplates()",
"def test_create_namespaced_processed_template(self):\n pass",
"def create_html(text, template, output):\n\n # TODO uncomment this for orginal DMP format (right now difficult with differing section sizes)\n #templateLoader = jinja2.FileSystemLoader(searchpath=\"../templates/new\")\n templateLoader = jinja2.FileSystemLoader(searchpath=\"../templates\")\n templateEnv = jinja2.Environment(loader=templateLoader)\n TEMPLATE_FILE = \"template_\" + template.lower() + \".html\"\n real_template = templateEnv.get_template(TEMPLATE_FILE)\n\n outputText = real_template.render(contact=text)\n html_file = open(output + \".html\", \"w\")\n html_file.write(outputText)\n html_file.close()\n\n return output + \".html\"",
"def generate_link_kml(self, d):\n return \"\"\"\\\n <NetworkLink>\n <name>%(image_filename)s</name>\n <Region>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n </Region>\n <Link>\n <href>%(link_url)s</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n </Link>\n </NetworkLink>\"\"\" % d",
"def render_from_lms(template_name, dictionary, context=None, namespace='main'):\r\n return render_to_string(template_name, dictionary, context, namespace=\"lms.\" + namespace)",
"def printKml (params, out = sys.stdout):\n \n out.write(\" <Placemark>\\n\")\n out.write(\" <name>\" + str(params['UserID']) + \"</name>\\n\")\n out.write(\" <description>\\n\")\n import io\n \n buf = io.StringIO()\n printHtml(params, buf)\n import cgi\n \n out.write(cgi.escape(buf.getvalue()))\n out.write(\" </description>\\n\")\n out.write(\" <styleUrl>#m_ylw-pushpin_copy0</styleUrl>\\n\")\n out.write(\" <Point>\\n\")\n out.write(\" <coordinates>\")\n out.write(str(params['longitude']))\n out.write(',')\n out.write(str(params['latitude']))\n out.write(\",0</coordinates>\\n\")\n out.write(\" </Point>\\n\")\n out.write(\" </Placemark>\\n\")",
"def get_template(self):\n return self.sep.join([self.htmls[html] for html in self.lang]).format(**self.fields)",
"def _get_uml_template(*, types: dict, type_mapping: dict, message_mapping: dict) -> str:\n relationships = []\n classes = []\n\n uml_template = \"\"\"\n digraph \"Protobuf UML class diagram\" {\n fontname = \"Bitstream Vera Sans\"\n fontsize = 8\n\n node [\n fontname = \"Bitstream Vera Sans\"\n fontsize = 8\n shape = \"record\"\n style=filled\n fillcolor=gray95\n ]\n\n edge [\n fontname = \"Bitstream Vera Sans\"\n fontsize = 8\n\n ]\n\n CLASSES\n\n RELATIONSHIPS\n }\n \"\"\"\n\n entry_index = 2\n for _type, message in types.items():\n type_template_text = StringIO()\n type_template_text.write(f\"\"\" {entry_index}[label = \"{{{_type}|\"\"\")\n fields = []\n for _field in message.fields:\n message_type = _field.message_type\n field_type = type_mapping[_field.type] # this will be 'message' if referencing another protobuf message\n\n if message_type:\n this_node = message_mapping[_type]\n that_node = message_mapping[message_type.name]\n relationships.append(f\" {this_node}->{that_node}\")\n field_type = message_type.name # so we replace the 'message' token by the actual name\n\n fields.append(f\"+ {_field.name}:{field_type}\")\n\n # add fields\n type_template_text.write(\"\\\\n\".join(fields))\n type_template_text.write(\"}\\\"]\\n\")\n entry_index += 1\n classes.append(type_template_text.getvalue())\n\n type_template_text.close()\n\n uml_template = uml_template.replace(\"CLASSES\", \"\\n\".join(classes))\n uml_template = uml_template.replace(\"RELATIONSHIPS\", \"\\n\".join(relationships))\n return uml_template",
"def generateXML(self):\n return self.formatEval(\n self.TEMPLATES[self.attrs['name']]['XML'],\n self.attrs\n )",
"def produce_13TeV_template(tag_name=\"HKHI\"):\n num_rebin = 1\n file_name = \"inputs/BkgEstimation_Lin/BkgEstimation_NONE_TOPO_PTDEP_\"+tag_name+\"_Lin.root\"\n print \"Input: \", file_name\n fin = ROOT.TFile.Open(file_name, \"read\")\n h_nom = fin.Get(\"bkg_total_gg_full\").Clone(\"bkg_nominal_old\")\n h_nom.Rebin(num_rebin)\n fout = ROOT.TFile.Open(\"hists_input_\"+tag_name+\".root\", \"recreate\")\n\n h_purity_sys = fin.Get(\"bkg_purity_syst_gg_full\").Clone(\"bkg_purity_syst_gg\")\n h_reducible_sys = fin.Get(\"bkg_reducible_syst_gg_full\").Clone(\"bkg_reducible_syst_gg\")\n h_irreducible_sys = fin.Get(\"bkg_irreducible_syst_gg_full\").Clone(\"bkg_irreducible_syst_gg\")\n h_iso_sys = fin.Get(\"bkg_iso_syst_gg_full\").Clone(\"bkg_iso_syst_gg\")\n\n #file_iso = \"isolation_sys/hist.root\"\n #fin2 = ROOT.TFile.Open(file_iso, \"read\")\n #h_iso_sys = fin2.Get(\"bkg_isolation_syst_gg\")\n ## inflat irreducible uncertainty by factor of 10\n # so that it closes to stats uncertainty in data\n sf = 1\n if INFLATE_SYS:\n sf = 10\n\n # after rebinning systematic uncertainties, need to scale down,\n # otherwise the uncertainties are inflated.\n h_purity_sys.Rebin(num_rebin).Scale(sf/num_rebin)\n h_irreducible_sys.Rebin(num_rebin).Scale(sf/num_rebin)\n h_reducible_sys.Rebin(num_rebin).Scale(sf/num_rebin)\n h_iso_sys.Rebin(num_rebin).Scale(sf/num_rebin)\n\n ## truncate the histograms to [200, 2000] GeV\n h_nom_new = truncate_hist(h_nom, \"bkg_nominal\")\n h_purity_sys_new = truncate_hist(h_purity_sys, \"h_purity_sys_new\")\n h_irreducible_sys_new = truncate_hist(h_irreducible_sys, \"h_irreducible_sys_new\")\n h_reducible_sys_new = truncate_hist(h_reducible_sys, \"h_reducible_sys_new\")\n h_iso_sys_new = truncate_hist(h_iso_sys, \"h_iso_sys_new\")\n\n #write down sys and nominal\n fout.cd()\n h_nom_new.Write()\n h_purity_sys_new.Write()\n h_reducible_sys_new.Write()\n h_irreducible_sys_new.Write()\n h_iso_sys_new.Write()\n\n h_purity_up, h_purity_down = create_sys_hist(h_nom_new, h_purity_sys_new, \"purity_sys\")\n h_purity_up.Write()\n h_purity_down.Write()\n\n h_red_up, h_red_down = create_sys_hist(h_nom_new, h_reducible_sys_new, \"reducible_sys\")\n h_red_up.Write()\n h_red_down.Write()\n\n h_irred_up, h_irred_down = create_sys_hist(h_nom_new, h_irreducible_sys_new, \"irreducible_sys\")\n h_irred_up.Write()\n h_irred_down.Write()\n\n h_iso_up, h_iso_down = create_sys_hist(h_nom_new, h_iso_sys, \"isolation_sys\")\n h_iso_up.Write()\n h_iso_down.Write()\n\n fin.Close()\n fout.Close()",
"def writer(output, output_name, output_data):\n\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if('Latitude' in exif.keys() and\n 'Latitude Reference' in exif.keys() and\n 'Longitude Reference' in exif.keys() and\n 'Longitude' in exif.keys()):\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'],\n description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output, output_name))",
"def make_input_data_kmls(rundata):\n \n import os\n from . import topotools, dtopotools\n\n regions2kml(rundata, combined=False)\n gauges2kml(rundata)\n\n topofiles = rundata.topo_data.topofiles\n for f in topofiles:\n topo_file_name = f[-1]\n topo_type = f[0]\n topo2kml(topo_file_name, topo_type)\n \n dtopofiles = rundata.dtopo_data.dtopofiles\n for f in dtopofiles:\n dtopo_file_name = f[-1]\n dtopo_type = f[0]\n dtopo2kml(dtopo_file_name, dtopo_type)",
"def line2kml(xy,fname='line.kml',name='line',color='00FFFF',width=3,\n verbose=True):\n \n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Line: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_line(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def generate_node_kml(self, d, children):\n return self.generate_leaf_kml(d, \"\\n\".join(children))",
"def render(self, template: str, **vars) -> str:",
"def Generate(self):\n return self.Render(self.TEMPLATE_NAME, {\n 'name': self._namespace.name,\n 'enums': self._enums,\n 'types': self._types,\n 'events': self._namespace.events,\n 'functions': self._namespace.functions,\n # TODO(sammc): Don't change years when regenerating existing output files.\n 'year': datetime.date.today().year,\n 'source_file': self._namespace.source_file,\n })",
"def kml_export_data(id_string, user, xform=None):\n\n def cached_get_labels(xpath):\n \"\"\"\n Get and Cache labels for the XForm.\n \"\"\"\n if xpath in list(labels):\n return labels[xpath]\n labels[xpath] = xform.get_label(xpath)\n\n return labels[xpath]\n\n xform = xform or XForm.objects.get(id_string=id_string, user=user)\n\n data_kwargs = {\"geom__isnull\": False}\n if xform.is_merged_dataset:\n data_kwargs.update(\n {\n \"xform_id__in\": list(\n xform.mergedxform.xforms.filter(\n deleted_at__isnull=True\n ).values_list(\"id\", flat=True)\n )\n }\n )\n else:\n data_kwargs.update({\"xform_id\": xform.pk})\n instances = Instance.objects.filter(**data_kwargs).order_by(\"id\")\n data_for_template = []\n labels = {}\n for instance in queryset_iterator(instances):\n # read the survey instances\n data_for_display = instance.get_dict()\n xpaths = list(data_for_display)\n xpaths.sort(key=cmp_to_key(instance.xform.get_xpath_cmp()))\n table_rows = [\n f\"<tr><td>{cached_get_labels(xpath) }</td>\"\n f\"<td>{data_for_display[xpath]}</td></tr>\"\n for xpath in xpaths\n if not xpath.startswith(\"_\")\n ]\n img_urls = image_urls(instance)\n\n if instance.point:\n img_url = img_urls[0] if img_urls else \"\"\n rows = \"\".join(table_rows)\n data_for_template.append(\n {\n \"name\": instance.xform.id_string,\n \"id\": instance.id,\n \"lat\": instance.point.y,\n \"lng\": instance.point.x,\n \"image_urls\": img_urls,\n \"table\": '<table border=\"1\"><a href=\"#\"><img width=\"210\" '\n f'class=\"thumbnail\" src=\"{img_url}\" alt=\"\"></a>{rows}'\n \"</table>\",\n }\n )\n\n return data_for_template",
"def create_template_dict(name, cat, boilerplate_name=None, is_common=False):\r\n return {\r\n \"display_name\": name,\r\n \"category\": cat,\r\n \"boilerplate_name\": boilerplate_name,\r\n \"is_common\": is_common\r\n }",
"def as_string(self):\r\n effects_list = self.get_effects()\r\n kwo = {}\r\n\r\n for key in self.keys:\r\n kwo[key] = get_stack(key, effects_list)\r\n\r\n template = self.get_template()\r\n\r\n templated = template.format(**kwo)\r\n\r\n return templated",
"def graphing1():\n return render_template('graph1.html')",
"def box2kml(xy,fname=None,name='box',color='FF0000',width=3,verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Box: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_region(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result",
"def kmlWriter(output_data, output_dir, output_name):\n msg = 'Writing ' + output_name + ' KML output.'\n print '[+]', msg\n logging.info(msg)\n # Instantiate a Kml object and pass along the output filename\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if 'Latitude' in exif.keys() and 'Latitude Reference' in exif.keys() and 'Longitude Reference' in exif.keys() and 'Longitude' in exif.keys():\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'], description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output_dir, output_name))",
"def __repr__(self):\n template = \"\"\"\n SnakemakeRule ({})\n \n - parent_id : {}\n - input : {}\n - output : {}\n - local : {}\n - template : {}\n - params : {}\n \"\"\"\n return template.format(\n self.rule_id,\n self.parent_id,\n self.input,\n self.output,\n self.local,\n self.template,\n self.params,\n )",
"def graphing2():\n return render_template('graph2.html')",
"def run(self, template: str, art: Optional[str] = None, **kwargs: Any) -> str:\n variables = self.__dict__\n variables.update(kwargs)\n\n template = CustomFormats().format(template, **variables)\n if art:\n art = art.format(nfo=template)\n template = art\n\n for m in re.finditer(r\"<\\?([01])\\?([\\D\\d]*?)\\?>\", template):\n # TODO: This if check is quite yucky, look into alternative options.\n # Ideally a custom format spec would be great.\n template = template.replace(\n m.group(0),\n m.group(2) if int(m.group(1)) else \"\"\n )\n\n template = \"\\n\".join(map(str.rstrip, template.splitlines(keepends=False)))\n\n return template",
"def poly2kml(xy,fname=None,name='poly',color='00FF00', width=3,\n verbose=True):\n\n if fname is None:\n fname = name + '.kml'\n\n x,y = xy\n\n if verbose:\n print(\"Polygon: %10.6f %10.6f\" % (x[0],y[0]))\n for j in range(1,len(x)):\n print(\" %10.6f %10.6f\" % (x[j],y[j]))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x'] = x\n mapping['y'] = y\n mapping['elev'] = elev\n mapping['name'] = name\n d = \" x[0] = %s, y[0] = %s\\n\" % (x[0],y[0]) \n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n mapping['desc'] = d\n mapping['color'] = color\n mapping['width'] = width\n\n v = \"\\n\"\n for j in range(len(x)):\n v = v + \"%s,%s,%s\\n\" % (f2s(x[j]),f2s(y[j]),f2s(elev))\n v = v + \"%s,%s,%s\\n\" % (f2s(x[0]),f2s(y[0]),f2s(elev))\n v.replace(' ','')\n \n region_text = kml_region(mapping, v)\n for j in range(1,len(x)):\n d = d + \" x[%i] = %s, y[%i] = %s\" % (j,f2s(x[j]),j,f2s(y[j]))\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def calc_template(template_def, config):\n template = Template(**template_def)\n #print \"template_def:\", template_def, \"config:\", config\n try:\n retvals = process_template(template, config, target=(None, None))\n except Exception:\n print(\"==== template ====\"); pprint(template_def)\n print(\"==== config ====\"); pprint(config)\n #traceback.print_exc()\n raise\n output = {}\n for rkey, rv in retvals.items():\n module_id, terminal_id = rkey\n module_key = str(module_id)\n output.setdefault(module_key, {})\n output[module_key][terminal_id] = rv.todict()\n return output",
"def generate(self, answers):\n\n text = self.template\n\n for (key, val) in answers.items():\n text = text.replace(\"{\" + key + \"}\", val)\n\n return text",
"def _generate_nml_from_dict(d: dict, name: str = \"input\"):\n\n def logical(b: bool):\n return \".true.\" if b else \".false.\"\n\n with tempfile.NamedTemporaryFile(prefix=\"swami_\", delete=False, suffix=\".inp\", mode=\"r+\") as nml:\n nml.write(f\"&{name}\\n\")\n for k, v in d.items():\n if isinstance(v, bool):\n nml.write(f\"{k} = {logical(v):s}\\n\")\n elif isinstance(v, str):\n nml.write(f\"{k} = '{v:s}'\\n\")\n elif isinstance(v, float):\n nml.write(f\"{k} = {v:23.16e}\\n\")\n elif isinstance(v, int):\n nml.write(f\"{k} = {v:d}\\n\")\n else:\n nml.write(f\"{k} = {v}\\n\")\n # nml.write(\"\\\\\")\n nml.write(\"&end\\n\")\n nml.close()\n\n return nml.name",
"def test_convert_csv_to_kml(self):\n import tempfile\n from pykml.util import convert_csv_to_kml\n\n # create a CSV file for testing\n csvfile = tempfile.TemporaryFile(mode='w+')\n csvfile.write('name,snippet,lat,lon\\n')\n csvfile.write('first,The first one,45.0,-90.0\\n')\n csvfile.write('second,The second one,46.0,-89.0\\n')\n csvfile.write('third,\"The third one (with quotes)\",45.0,-88.0\\n')\n csvfile.seek(0)\n\n kmlobj = convert_csv_to_kml(csvfile)\n csvfile.close()\n\n target = etree.fromstring(\n '<kml '\n 'xmlns:atom=\"http://www.w3.org/2005/Atom\" '\n 'xmlns:gx=\"http://www.google.com/kml/ext/2.2\" '\n 'xmlns=\"http://www.opengis.net/kml/2.2\">'\n '<Document>'\n '<Folder>'\n '<name>KmlFile</name>'\n '<Placemark>'\n '<name>first</name>'\n '<Snippet maxLines=\"2\">The first one</Snippet>'\n '<description>'\n '<![CDATA['\n '<table border=\"1\"'\n '<tr><th>name</th><td>first</td></tr>'\n '<tr><th>snippet</th><td>The first one</td></tr>'\n '<tr><th>lat</th><td>45.0</td></tr>'\n '<tr><th>lon</th><td>-90.0</td></tr>'\n '</table>'\n ']]>'\n '</description>'\n '<Point>'\n '<coordinates>-90.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>second</name>'\n '<Snippet maxLines=\"2\">The second one</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>second</td></tr><tr><th>snippet</th><td>The second one</td></tr><tr><th>lat</th><td>46.0</td></tr><tr><th>lon</th><td>-89.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-89.0,46.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>third</name>'\n '<Snippet maxLines=\"2\">The third one (with quotes)</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>third</td></tr><tr><th>snippet</th><td>The third one (with quotes)</td></tr><tr><th>lat</th><td>45.0</td></tr><tr><th>lon</th><td>-88.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-88.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '</Folder>'\n '</Document>'\n '</kml>'\n )\n self.assertTrue(compare_xml(target, kmlobj))",
"def open_airspace_format_2_kml(self, source_file_txt):\n # load template for kml file\n self.load_kml_template(self.full_path_kml_template)\n # load airspace source\n self.load_airspace_open_air_format(source_file_txt)\n\n self.kml_lines = self.kml_template['header']\n self.kml_lines.extend(self.kml_template['good_subdivided']['head'])\n # collect all A and B kml lines\n kml_A = []\n kml_B = []\n # transform airspaces and attach to A and B collect-lists\n for airspace in self.airspaces:\n airspace.make_kml_format(self.kml_template)\n if airspace.as_type == 'A':\n kml_A.extend(airspace.kml_lines)\n if airspace.as_type == 'B':\n kml_B.extend(airspace.kml_lines)\n\n self.kml_lines.extend(kml_A)\n self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])\n # start B part\n self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])\n self.kml_lines.extend(kml_B)\n self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])\n\n full_path_kml = source_file_txt[:-4] + '_converted.kml'\n # uisave dialog\n full_path_kml = filesavebox(default=full_path_kml, filetypes=\"*.kml\")\n if full_path_kml is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n # write to file\n f = open(full_path_kml, 'w')\n f.writelines(self.kml_lines)\n f.close()\n print('Resulting KML files was saved to: %s' % full_path_kml)",
"def make_cake_templates():\n tmpl = dict()\n\n # Attributes\n tmpl['Cooking time'] = ConditionTemplate(\n name=\"Cooking time\",\n description=\"The time elapsed during a cooking process\",\n bounds=RealBounds(0, 7 * 24.0, \"hr\")\n )\n tmpl[\"Oven temperature setting\"] = ParameterTemplate(\n name=\"Oven temperature setting\",\n description=\"Where the knob points\",\n bounds=RealBounds(0, 2000.0, \"K\")\n )\n tmpl[\"Oven temperature\"] = ConditionTemplate(\n name=\"Oven temperature\",\n description=\"Actual temperature measured by the thermocouple\",\n bounds=RealBounds(0, 2000.0, \"K\")\n )\n\n tmpl[\"Tastiness\"] = PropertyTemplate(\n name=\"Tastiness\",\n description=\"Yumminess on a fairly arbitrary scale\",\n bounds=IntegerBounds(lower_bound=1, upper_bound=10)\n )\n\n # Objects\n tmpl[\"Baking in an oven\"] = ProcessTemplate(\n name=\"Baking in an oven\",\n description='Using heat to promote chemical reactions in a material',\n allowed_labels=['precursor'],\n conditions=[(tmpl[\"Oven temperature\"], RealBounds(0, 700, \"degF\"))],\n parameters=[(tmpl[\"Oven temperature setting\"], RealBounds(100, 550, \"degF\"))]\n )\n\n tmpl[\"Taste test\"] = MeasurementTemplate(\n name=\"Taste test\",\n properties=[tmpl[\"Tastiness\"]]\n )\n\n tmpl[\"Dessert\"] = MaterialTemplate(\n name=\"Dessert\",\n properties=[tmpl[\"Tastiness\"]]\n )\n\n tmpl[\"Generic Material\"] = MaterialTemplate(name=\"Generic\")\n tmpl[\"Icing\"] = ProcessTemplate(name=\"Icing\",\n description='Applying a coating to a substrate',\n allowed_labels=['coating', 'substrate'])\n tmpl[\"Mixing\"] = ProcessTemplate(name=\"Mixing\",\n description='Physically combining ingredients',\n allowed_labels=['wet', 'dry', 'leavening', 'seasoning',\n 'sweetener', 'shortening', 'flavoring'])\n tmpl[\"Procurement\"] = ProcessTemplate(name=\"Procurement\",\n description=\"Buyin' stuff\")\n\n return tmpl",
"def _front_matter_tokens_render(node: RenderTreeNode, context: RenderContext) -> str:\n dct = {}\n for child in node.children:\n path = child.meta[\"key_path\"]\n value = (\n \"\\n\\n\".join(subchild.render(context) for subchild in child.children)\n if child.children\n else True\n )\n subdct = dct\n for key in path[:-1]:\n subdct.setdefault(key, {})\n subdct = subdct[key]\n subdct[path[-1]] = value\n text = yaml_dump(dct).rstrip()\n return f\"---\\n{text}\\n---\"",
"def render_knowl_in_template(knowl_content, **kwargs):\n render_me = u\"\"\"\\\n {%% include \"knowl-defs.html\" %%}\n {%% from \"knowl-defs.html\" import KNOWL with context %%}\n {%% from \"knowl-defs.html\" import KNOWL_LINK with context %%}\n {%% from \"knowl-defs.html\" import KNOWL_INC with context %%}\n {%% from \"knowl-defs.html\" import TEXT_DATA with context %%}\n\n %(content)s\n \"\"\"\n knowl_content = md_preprocess(knowl_content)\n\n # markdown enabled\n render_me = render_me % {'content': md.convert(knowl_content)}\n # Pass the text on to markdown. Note, backslashes need to be escaped for\n # this, but not for the javascript markdown parser\n try:\n return render_template_string(render_me, **kwargs)\n except Exception as e:\n return \"ERROR in the template: %s. Please edit it to resolve the problem.\" % e",
"def make(self, width=1500.0, height=1000.0):\n return self._meta.template1(width, height).encode('utf-8')",
"def __init__(self, full_path_of_source=''):\n if len(full_path_of_source) == 0:\n full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=[\"*.txt\", \"*.kml\"])\n if full_path_of_source is None:\n print('Airspace conversion was aborted by the user')\n quit()\n # set template (this should not be changed)\n self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named \"good\" and \"bad\"\n\n self.airspaces = [] # airspace container\n self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template\n 'good_subdivided': {'head':[], 'placemark': [], 'tail': []},\n 'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}\n self.txt_lines = [] # airspace file in open airspace format\n self.kml_lines = [] # airspace file in kml format\n \"\"\" handle conversion from and to KML / airspace format\"\"\"\n if full_path_of_source.lower().endswith('.kml'):\n self.kml_2_open_airspace_and_json_format(full_path_of_source)\n if full_path_of_source.lower().endswith('.txt'):\n self.open_airspace_format_2_kml(full_path_of_source)\n self.plot_all() # works for now only for TXT input",
"def build_maps():\n return render_template(\"maps.html\")",
"def facility_as_kml(facility):\n return KML.Placemark(\n KML.name(facility.nrqz_id),\n KML.Point(KML.coordinates(f\"{facility.location.x},{facility.location.y}\")),\n )",
"def __init__(self, template):\n\n self.template = template\n self.parsed_template = {}",
"def test_template_output(self):\n g = microformats.models.geo()\n g.latitude = 37.408183\n g.latitude_description = 'N 37° 24.491'\n g.longitude = -122.13855\n g.longitude_description = 'W 122° 08.313'\n g.save()\n hc = microformats.models.hCard()\n hc.honorific_prefix = 'Mr'\n hc.given_name = 'Joe'\n hc.additional_name = 'Arthur'\n hc.family_name = 'Blogs'\n hc.honorific_suffix = 'PhD'\n hc.url = 'http://acme.com/'\n hc.email_work = '[email protected]'\n hc.email_home = '[email protected]'\n hc.tel_work = '+44(0)1234 567890'\n hc.tel_home = '+44(0)1324 234123'\n hc.street_address = '5445 N. 27th Street'\n hc.extended_address = ''\n hc.locality = 'Milwaukee'\n hc.region = 'WI'\n hc.country_name = 'US'\n hc.postal_code = '53209'\n hc.org = \"Acme Corp.\"\n hc.title = 'Vice President'\n hc.save()\n hcl = microformats.models.hCalendar()\n hcl.summary = 'Important Meeting'\n hcl.location = 'BBC in London'\n hcl.url = 'http://www.bbc.co.uk/'\n hcl.dtstart = datetime.datetime(2009, 4, 11, 13, 30)\n hcl.dtend = datetime.datetime(2009, 4, 11, 15, 30)\n hcl.description = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.'\n hcl.street_address = 'Broadcasting House'\n hcl.extended_address = 'Portland Place'\n hcl.locality = 'London'\n hcl.region = ''\n hcl.country_name = 'GB'\n hcl.postal_code = 'W1A 1AA'\n hcl.save()\n u = User.objects.create_user('john', '[email protected]', 'password')\n URL = 'http://twitter.com/ntoll'\n tgt = 'Nicholas Tollervey'\n x = microformats.models.xfn()\n x.source = u\n x.target = tgt \n x.url = URL\n x.save()\n xfnv1 = microformats.models.xfn_values.objects.get(value='friend')\n xfnv2 = microformats.models.xfn_values.objects.get(value='met')\n xfnv3 = microformats.models.xfn_values.objects.get(value='colleague')\n x.relationships.add(xfnv1)\n x.relationships.add(xfnv2)\n x.relationships.add(xfnv3)\n x.save()\n g2 = microformats.models.geo()\n g2.latitude = 45.498677\n g2.latitude_description = \"45°34' 13\"\" N\"\n g2.longitude = -73.570260 \n g2.longitude_description = \"73°29' 55\"\" W\" \n g2.save()\n hc2 = microformats.models.hCard()\n hc2.honorific_prefix = 'Mr'\n hc2.given_name = 'John'\n hc2.additional_name = ''\n hc2.family_name = 'Fletcher'\n hc2.honorific_suffix = 'MA(cantab)'\n hc2.url = 'http://lso.co.uk/'\n hc2.tel_work = '+44(0)1234 567456'\n hc2.street_address = 'The Barbican Centre'\n hc2.extended_address = 'Silk Street'\n hc2.locality = 'London'\n hc2.country_name = 'GB'\n hc2.postal_code = 'EC2Y 8DS'\n hc2.org = 'London Symphony Orchestra'\n hc2.title = 'Principal Tuba Player'\n hc2.save()\n hcl2 = microformats.models.hCalendar()\n hcl2.summary = 'Operation Overlord'\n hcl2.location = 'Normandy, France'\n hcl2.url = 'http://en.wikipedia.org/wiki/Operation_Overlord'\n hcl2.dtstart = datetime.datetime(1944, 6, 6)\n hcl2.dtend = datetime.datetime(1944, 8, 30)\n hcl2.description = 'You are about to embark upon the Great Crusade, toward which we have striven these many months. The eyes of the world are upon you. The hopes and prayers of liberty-loving people everywhere march with you. 
In company with our brave Allies and brothers-in-arms on other Fronts, you will bring about the destruction of the German war machine, the elimination of Nazi tyranny over the oppressed peoples of Europe, and security for ourselves in a free world.'\n hcl2.save()\n listing = microformats.models.hListing()\n listing.listing_action = \"sell\"\n listing.summary = \"Pony requires a good home\"\n listing.description = \"A young pony who answers to the name Django\"\\\n \" requires a new home having outgrown his current host. Easy\"\\\n \" going and fun to play with Django also provides rainbow\"\\\n \" manure that is sure to help the garden grow.\"\n listing.lister_fn = \"John Doe\"\n listing.lister_email = \"[email protected]\"\n listing.lister_url = \"http://isp.com/django_the_pony\"\n listing.lister_tel = \"+44(0) 1234 567456\"\n listing.dtlisted = datetime.datetime(2009, 5, 6)\n listing.dtexpired = datetime.datetime(2009, 8, 19)\n listing.price = \"£2500 ono\"\n listing.item_fn = \"Django the Pony\"\n listing.item_url = \"http://djangoproject.com/\"\n listing.locality = \"Brighton\"\n listing.country_name = \"GB\"\n listing.save()\n rev1 = microformats.models.hReview()\n rev1.summary=\"Acme's new services rock!\"\n rev1.type='business'\n rev1.description='Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.'\n rev1.rating=4\n rev1.dtreviewed=datetime.datetime(2009,4,10)\n rev1.reviewer='John Smith'\n rev1.fn='Acme Corp'\n rev1.url='http://acme.com'\n rev1.tel='+44(0)1234 567456'\n rev1.street_address = '5445 N. 27th Street'\n rev1.extended_address = ''\n rev1.locality = 'Milwaukee'\n rev1.region = 'WI'\n rev1.country_name = 'US'\n rev1.postal_code = '53209'\n rev1.save()\n rev2 = microformats.models.hReview()\n rev2.summary = 'A phenomenal tuba recital'\n rev2.description = 'Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.'\n rev2.rating=5\n rev2.type='event'\n rev2.reviewer='John Doe'\n rev2.fn='John Fletcher - One man and his Tuba'\n rev2.url='http://www.johnfletcher-tuba.co.uk/'\n rev2.dtstart = datetime.datetime(1987, 10, 3, 19, 30)\n rev2.street_address = 'The Pro Arte Theatre'\n rev2.locality = 'London'\n rev2.save()\n rev3 = microformats.models.hReview()\n rev3.summary = \"Mr Bloggs children's entertainer flops\"\n rev3.description = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n rev3.rating=2\n rev3.type='person'\n rev3.reviewer='Melvyn Bragg'\n rev3.fn='Mr Bloggs'\n rev3.tel='01234 567456'\n rev3.save()\n rev4 = microformats.models.hReview()\n rev4.summary = 'Latest Star-Wars is Sucko-Barfo'\n rev4.description = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. 
Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n rev4.rating=1\n rev4.type='film'\n rev4.reviewer='Barry Norman'\n rev4.fn='Star Wars - Revenge of the Sith'\n rev4.url='http://www.starwars.com/movies/episode-iii/'\n rev4.save()\n rev5 = microformats.models.hReview()\n rev5.rating=1\n rev5.type='film'\n rev5.fn='Star Wars - The Phantom Menace'\n rev5.save()\n feed = microformats.models.hFeed()\n feed.save()\n entry1 = microformats.models.hEntry()\n entry1.hfeed = feed\n entry1.entry_title = 'Entry 1 Title'\n entry1.entry_content = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n entry1.entry_summary = 'Lorem ipsum dolor sit amet doo-dah whatsit thingymajig'\n entry1.author = 'A.N.Other'\n entry1.bookmark = 'http://website.com/entry1'\n entry1.updated = datetime.datetime(2009, 6, 1)\n entry1.save()\n entry2 = microformats.models.hEntry()\n entry2.hfeed = feed\n entry2.entry_title = 'Entry 2 Title'\n entry2.entry_content = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n entry2.entry_summary = 'Lorem ipsum dolor sit amet doo-dah whatsit thingymajig'\n entry2.author = 'Sidney Humphries'\n entry2.bookmark = 'http://website.com/entry2'\n entry2.updated = datetime.datetime(2009, 3, 14)\n entry2.save()\n entry3 = microformats.models.hEntry()\n entry3.hfeed = feed\n entry3.entry_title = 'Entry 3 Title'\n entry3.entry_content = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n entry3.entry_summary = 'Lorem ipsum dolor sit amet doo-dah whatsit thingymajig'\n entry3.author = 'Nicholas Hawkesmoor'\n entry3.bookmark = 'http://website.com/entry3'\n entry3.updated = datetime.datetime(2008, 12, 28)\n entry3.save()\n entry4 = microformats.models.hEntry()\n entry4.entry_title = 'Entry 4 Title'\n entry4.entry_content = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n entry4.entry_summary = 'Lorem ipsum dolor sit amet doo-dah whatsit thingymajig'\n entry4.author = 'Fred Blogs'\n entry4.bookmark = 'http://website.com/entry4'\n entry4.updated = datetime.datetime(2008, 11, 15)\n entry4.save()\n item1 = microformats.models.hNews()\n item1.hfeed = feed\n item1.entry_title = 'L.A. 
Icon Otis Chandler Dies at 78'\n item1.entry_content = 'Otis Chandler, whose vision and determination as publisher of the Los Angeles Times from 1960 to 1980 catapulted the paper from mediocrity into the front ranks of American journalism, died today of a degenerative illness called Lewy body disease. He was 78.'\n item1.entry_summary = 'An obituary of Los Angeles Times Publisher Otis Chandler'\n item1.author = 'David Shaw and Mitchell Landsberg'\n item1.bookmark = 'http://www.latimes.com/news/local/la-me-chandler-obit,0,7195252.story'\n item1.updated = datetime.datetime(2006, 2, 27)\n item1.source_org = 'Los Angeles Times'\n item1.source_url = 'http://www.latimes.com'\n item1.principles_url = 'http://www.latimes.com/news/nationworld/nation/la-0705lat_ethics_code-pdf,0,7257671.acrobat'\n item1.license_url = 'http://www.latimes.com/services/site/lat-terms,0,6713384.htmlstory'\n item1.license_description = 'Terms of service'\n item1.locality = 'Los Angeles'\n item1.country_name = 'US'\n item1.longitude = -118.2666667\n item1.latitude = 34.0444444\n item1.save()\n\n # All the data is defined so lets render the test template...\n template = get_template('test.html')\n data = {\n 'contact': hc,\n 'loc': g,\n 'event': hcl, \n 'listing': listing,\n 'review1': rev1,\n 'review2': rev2,\n 'review3': rev3,\n 'review4': rev4,\n 'review5': rev5,\n 'person': x,\n 'c2': hc2,\n 'loc2': g2,\n 'event2': hcl2,\n 'feed': feed,\n 'entry': entry4,\n 'item': item1,\n }\n context = Context(data)\n import html_test\n path = os.path.dirname(html_test.__file__)\n outfile = codecs.open(os.path.join(path, 'microformat_test.html'), 'w', 'utf-8')\n outfile.write(template.render(context))\n outfile.close()",
"def __str__(self):\n t = Template(\n \"\"\"\n <h4>$title</h4>\n $imgs\n $footnotes\n <hr/>\"\"\")\n # Return result.\n return t.substitute({\n \"title\": self.title,\n \"imgs\": self.render_images(),\n \"footnotes\": self.render_footnotes()\n })",
"def export_kml(self, kmz=False):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n if kmz:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kmz\",\n filetypes=((\"keyhole markup language\", \"*.kmz\"),\n (\"All Files\", \"*.*\")))\n else:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kml\",\n filetypes=((\"keyhole markup language\", \"*.kml\"),\n (\"All Files\", \"*.*\")))\n if outputfile:\n self.tabs.window.aistracker.create_kml_map(\n outputfile, kmzoutput=kmz, orderby=orderby,\n region=currentregion)\n else:\n raise ExportAborted('Export cancelled by user.')",
"def get_default_template(env):\n return env.from_string(\n \"\"\"\\\n{% if record.standard_information and record.filename_information %}\n0|{{ prefix }}{{ record.path }}|{{ record.inode }}|0|{{ record.standard_information.owner_id }}|0|{{ record.size }}|{{ record.standard_information.accessed|unixtimestampformat }}|{{ record.standard_information.modified|unixtimestampformat }}|{{ record.standard_information.changed|unixtimestampformat }}|{{ record.standard_information.created|unixtimestampformat }}\n{% endif %}\n{% if record.standard_information and record.filename_information %}\n0|{{ prefix }}{{ record.path }} (filename)|{{ record.inode }}|0|{{ record.standard_information.owner_id }}|0|{{ record.size }}|{{ record.filename_information.accessed|unixtimestampformat }}|{{ record.filename_information.modified|unixtimestampformat }}|{{ record.filename_information.changed|unixtimestampformat }}|{{ record.filename_information.created|unixtimestampformat }}\n{% endif %}\n{% for e in record.indx_entries %}\n0|{{ prefix }}{{ record.path }}\\\\{{ e.name }} (INDX)|{{ e.inode }}|0|0|0|{{ e.logical_size }}|{{ e.accessed|unixtimestampformat }}|{{ e.modified|unixtimestampformat }}|{{ e.changed|unixtimestampformat }}|{{ e.created|unixtimestampformat }}\n{% endfor %}\n{% for e in record.slack_indx_entries %}\n0|{{ prefix }}{{ record.path }}\\\\{{ e.name }} (slack-INDX)|{{ e.inode }}|0|0|0|{{ e.logical_size }}|{{ e.accessed|unixtimestampformat }}|{{ e.modified|unixtimestampformat }}|{{ e.changed|unixtimestampformat }}|{{ e.created|unixtimestampformat }}\n{% endfor %}\n\"\"\"\n )",
"def apply_rdf_template(filename, results):\n rdf_mapping_schema = open(filename, \"r\").read()\n\n # Rename all variables according to the 'collection_field' format\n # Pass a function instead of a pattern to sub()\n f = lambda p: f\"${{result['{get_collection_name(p.group(1))}_{p.group(2)}']}}\"\n s = re.sub(r'\\{(.*?)\\.(.*?)\\}', f, rdf_mapping_schema)\n #print(s)\n\n #save rdf turtle prefixes in a string\n prefixes = \"\"\n for result in re.findall(r'\\@.*?[\\r\\n]', s):\n prefixes += result\n \n #remove rdf turtle prefixes from template\n s = re.sub(r'\\@.*?[\\r\\n]', \"\", s)\n #print(s)\n\n #split triples\n s = re.findall(r'\\<.*? \\.', s)\n #print(s)\n \n template_string = \"% for result in data:\\n\" \n for triple in s:\n #find all references to result\n data = re.findall(r\"\\['(.*?)'\\]\", triple)\n #print(data)\n #check if all references to result exists as keys in results, \n #only try to create this triple for this result if they do\n template_string += \" % if set(\" + str(data) + \").issubset(set(result.keys())): \\n\"\n template_string += \" \" + triple + \"\\n\"\n template_string += \" % endif\\n\"\n template_string += \"% endfor\"\n # Check out the generated template\n print(\"Template string:\")\n print(template_string)\n print()\n\n template = Template(template_string)\n rdf_data = template.render(data=results)\n\n with open(\"./rdf.nt\", \"w\") as file:\n file.write(prefixes)\n file.write(rdf_data)\n\n g = rdflib.Graph()\n g.parse(\"rdf.nt\", format='n3')\n\n #import pprint\n #for stmt in g:\n # pprint.pprint(stmt)\n\n return rdf_data",
"def _template(inlist):\n from collections import OrderedDict\n if isinstance(inlist, str):\n inlist = [inlist]\n\n templates = []\n for item in reversed(inlist):\n templates.append(output_space(item))\n\n return OrderedDict(reversed(OrderedDict(templates).items()))",
"def makeTemplate(m, wl, ell, maxEll, outputFile = None):\n ell = numpy.array(ell)\n wl = numpy.array(wl)\n ft = fftTools1.fftFromLiteMap(m)\n #print \"max_lx, max_ly\", ft.lx.max(), ft.ly.max()\n #print \"m_dx, m_dy\", m.pixScaleX, m.pixScaleY\n #print \"m_nx, m_ny\", m.Nx, m.Ny\n l_f = numpy.floor(ft.modLMap)\n l_c = numpy.ceil(ft.modLMap)\n ft.kMap[:] = 0 \n for i in xrange(numpy.shape(ft.kMap)[0]):\n for j in xrange(numpy.shape(ft.kMap)[1]):\n if l_f[i,j] > maxEll or l_c[i,j] > maxEll:\n continue\n w_lo = wl[l_f[i,j]]\n w_hi = wl[l_c[i,j]]\n trueL = ft.modLMap[i,j]\n w = (w_hi-w_lo)*(trueL - l_f[i,j]) + w_lo\n ft.kMap[i,j] = w\n\n m = m.copy()\n m.data[:] = 0.\n m.data = abs(ft.kMap)\n if outputFile != None:\n m.writeFits(outputFile, overWrite = True)\n return m",
"def generate(self):\n\n letter = ''\n\n for template_name in self.templates:\n template = self.templates[template_name]\n\n # Process all replacements ({...} syntax).\n replacements = re.finditer(REPLACEMENT_REGEX, template)\n for replacement in replacements:\n match = replacement.group()\n key = replacement.group(1)\n\n template = template.replace(match, self._lookup(key))\n\n # Process all conditionals (<...> syntax).\n conditionals = re.finditer(CONDITIONAL_REGEX, template)\n for conditional in conditionals:\n match = conditional.group()\n\n # Process each condition within the conditional ([...]... syntax).\n conditions = re.finditer(CONDITION_REGEX, match)\n for index, condition in enumerate(conditions):\n skill_type = condition.group(2)\n skill = condition.group(3)\n text = condition.group(4)\n\n # If the skill is empty, treat it as a catch all case.\n if not skill or skill in self._lookup(skill_type):\n template = template.replace(match, text)\n break\n\n letter += template\n\n return letter",
"def site2nrml(model, params_dict): \n \"\"\"\n # Some XML definitions\n NAMESPACE = 'http://openquake.org/xmlns/nrml/0.4'\n GML_NAMESPACE = 'http://www.opengis.net/gml'\n SERIALIZE_NS_MAP = {None: NAMESPACE, 'gml': GML_NAMESPACE} \n gml_ns = SERIALIZE_NS_MAP['gml']\n \"\"\"\n \n # Head matter \n root = etree.Element(_tag='nrml', nsmap={'gml': 'http://www.opengis.net/gml'})\n root.set('xmlns', 'http://openquake.org/xmlns/nrml/0.4')\n root.append(etree.Comment('%s' % '%s site model' %(model)))\n \n\n # Define Site Model Name \n sMod = etree.SubElement(root, \"siteModel\")\n sMod.set('name', model + ' Site Model')\n \n # Define sub element\n \n for key in params_dict:\n \n site = etree.SubElement(sMod, \"site\")\n site.set('lon', '%s' % key[0])\n site.set('lat', '%s' % key[1])\n site.set('vs30', '%s' % params_dict[key][0])\n site.set('vs30Type', '%s' % 'inferred')\n site.set('z1pt0', '%s' % '%3.3f' % float(params_dict[key][1]))\n site.set('z2pt5', '%s' % '%3.3f' % float(params_dict[key][2]))\n \n #print(getMinMax(params_dict))\n \n # Form tree and write to xml\n root_tree = etree.ElementTree(root)\n outFile = open((out_directory + '/' + out_filename), 'wb')\n root_tree.write(outFile, encoding=\"utf-8\", xml_declaration=True, pretty_print=True)",
"def parse_template(self):\n for line in self.raw_template.split(\"\\n\"):\n line = line.strip()\n if line.startswith('#m3'):\n key, val = line[3:].strip().split('=', 1)\n key = key.strip()\n val = val.strip()\n self.variables[key] = val\n\n for fitem in self.finditem.finditer(self.raw_template):\n fgrp = fitem.groups()\n categ = fgrp[0]\n name = fgrp[1]\n rest_str = fgrp[2]\n rest = {} # type: dict\n for item in rest_str.split('|'):\n item = item.strip()\n if item:\n key, val = item.split('=')\n rest[key] = val\n\n self.data[name] = (categ, rest)",
"def cheetah_template(self, pre=False):\n if self.is_req_output:\n cht_tmpl = self.req_out_chth\n return cht_tmpl.substitute(self.xml_out)\n elif self.is_output:\n xml_out = self.xml_out\n xml_out['out_sel_name'] = self.out_sel_name\n cht_tmpl = self.file_chth\n return cht_tmpl.substitute(self.xml_out)\n elif self.is_input and not pre:\n if self.pname in self.gen_in_fmt:\n if self.gen_in_fmt[self.pname] == 'vcf,vcf_bgzip':\n cht_tmpl = self.vcf_choose\n else:\n cht_tmpl = PercentTemplate(self.reg_arg)\n elif self.pname in self.tool_data[self.tool_name]['input_fmt']:\n cht_tmpl = self.req_out_chth\n return cht_tmpl.substitute(self.xml_out)\n elif self.is_input and pre:\n cht_tmpl = self.vcf_tabix\n return cht_tmpl.substitute(self.xml_out)\n else:\n if self.xml_out['section'] not in ['required']:\n template_string = self.ext_arg\n else:\n template_string = self.reg_arg\n if self.xml_out['type'] == 'boolean':\n cht_tmpl = PercentTemplate(template_string.replace('%argument ', ''))\n else:\n cht_tmpl = PercentTemplate(template_string)\n return cht_tmpl.substitute(self.xml_out)",
"def generate(self, diagram):",
"def read_template():\n\n text_msg = \"\"\"${PERSON_NAME} - Calling Campaign Summary - ${DATE}:\\n\n Total Called = ${TOTAL_CALLED}\\n\n Answered = ${ANSWERED}\\n\n Not Answered = ${NOT_ANSWERED}\\n\n Declines = ${DECLINES}\\n\n Remaining = ${REMAINING}\\n\n \\n\n Thank You.\"\"\"\n\n return Template(text_msg)",
"def wkt(self): # -> str:\n ...",
"def CreateKmlDoc():\n\n kml_doc = xml.dom.minidom.Document()\n kml_element = kml_doc.createElementNS('http://www.opengis.net/kml/2.2', 'kml')\n kml_element.setAttribute('xmlns', 'http://www.opengis.net/kml/2.2')\n kml_element = kml_doc.appendChild(kml_element)\n document = kml_doc.createElement('Document')\n kml_element.appendChild(document)\n return kml_doc",
"def formatted_label_string(self):\n return Template(self.zpl_template.template).safe_substitute(self.label_context)",
"def _text(self, template, **kw):\n ns = dict()\n ns['csv'] = _args_to_csv\n ns['f'] = _Namespace(kw)\n return Template(template).render(**ns)",
"def template_node(scope_key):\n node = TemplateNode()\n node.scope_key = scope_key\n return node",
"def parse_template(data, template):\n img_html = \"\"\"<div class=\"thumb-wrap\"><div class=\"thumb-holder\"></div><a href=\"{{URL}}\" target=\"_top\"><div class=\"thumb-img\" style=\"background-image:url('{{IMG}}');\"></div></a></div>\"\"\"\n template = template.replace('{{URL}}', data['link'].replace('http:','https:'))\n template = template.replace('{{URLX}}', data['link'])\n template = template.replace('{{TITLE}}', data['title'])\n #template = template.replace('{{BLURB}}', data['summary'])\n img_html = img_html.replace('{{URL}}', data['link'].replace('http:','https:'))\n if hasattr(data, 'tags') and len(data['tags']) > 0:\n template = template.replace('{{SECTION}}', data['tags'][0]['term'])\n else:\n template = template.replace('<h2><a href=\"{{URL}}\" target=\"_top\">{{SECTION}}</a></h2>', '')\n if hasattr(data, 'media_content') and len(data['media_content']) > 0:\n template = template.replace('{{IMG}}', '%s?w=150' % data['media_content'][0]['url'].replace('http:','https:'))\n else:\n template = template.replace(img_html, '')\n\n return template",
"def makepkl():\n # Old osgeo.ogr approach\n from osgeo import ogr\n # USTimeZones.kml source is unknown, but was freely available and\n # Has been converted to a pkl file\n kmlpath = os.path.join(os.path.dirname(__file__), 'USTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(uspklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(uspklpath, 'w'))\n\n # WorldTimeZones.kml source is below and was freely available and\n # Has been converted to a pkl file\n # https://productforums.google.com/forum/?fromgroups=#!msg/gec-tools/EdR18tz_5k8/MRPV85OxXIkJ\n kmlpath = os.path.join(os.path.dirname(__file__), 'WorldTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(worldpklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(worldpklpath, 'w'))",
"def render (self, fname, destpath='.'):\n\n outf = os.path.join(self.stpath, destpath, fname)\n\n if not os.path.isdir(os.path.dirname(outf)):\n os.makedirs(os.path.dirname(outf), 0o0755)\n\n with open(os.path.join('templates', fname), 'r') as tf:\n tmpl = Template(tf.read())\n with open(outf, 'w') as of:\n of.write(tmpl.substitute(self.kv))\n\n self.add_file(outf)",
"def saveKML(kmlFile):\n\n tilePath = os.path.basename('map-NYC_heatmap.png')\n north = topLeftLat\n south = bottomRightLat\n east = topLeftLon\n west = bottomRightLon\n \n bytes = KML % (tilePath, north, south, east, west)\n file(kmlFile, \"w\").write(bytes)",
"def markov_story():\n return render_template(\"markovstory.html\")",
"def html_template_file(self):\n pass",
"def get_configuration_template(self):\n return {'EXAMPLE_KEY_1': \"Example value\",\n 'EXAMPLE_KEY_2': [\"Example\", \"Value\"]\n }",
"def render(template, variables={}):\r\n\treturn prettify( parse(template).render(dict(variables.items())) )",
"def _commandTemplate(self, command:dict) -> ET.Element:\n\t\tresult = ET.Element('Command')\n\t\tfor key,value in command.items():\n\t\t\tET.SubElement(result,key).text = value\n\t\treturn result",
"def generate_haproxy_config(template=None, instances=None):\n\n return Template(filename=template).render(instances=instances)",
"def shn_compose_message(data, template):\n\n if data:\n root = etree.Element(\"message\")\n for k in data.keys():\n entry = etree.SubElement(root, k)\n entry.text = s3xrc.xml.xml_encode(str(data[k]))\n\n message = None\n tree = etree.ElementTree(root)\n\n if template:\n template = os.path.join(request.folder, \"static\", template)\n if os.path.exists(template):\n message = s3xrc.xml.transform(tree, template)\n\n if message:\n return str(message)\n else:\n return s3xrc.xml.tostring(tree, pretty_print=True)",
"def _generate_template(dictionary):\n task_dict = dict(dictionary)\n lines = []\n for key in sorted(TaskInfo._READ_ONLY_FIELDS):\n if key not in task_dict:\n continue\n\n value = TaskInfo._dpop(task_dict, key)\n lines.extend([\n \"# {}:\".format(key),\n \"# {}\".format(\"\\n#\".join(value.splitlines())),\n \"\",\n ])\n\n for key in sorted(task_dict.keys()):\n lines.extend([\n \"{}:\".format(key),\n str(task_dict[key]),\n \"\",\n ])\n\n return \"\\n\".join(lines)",
"def get_configuration_template(self):\n return {'EXAMPLE_KEY_1': \"Example value\",\n 'EXAMPLE_KEY_2': [\"Example\", \"Value\"]\n }",
"def logging_template():\n template = (\n '[loggers]\\n'\n 'keys=root\\n'\n '\\n'\n '[handlers]\\n'\n 'keys=consoleHandler\\n'\n '\\n'\n '[formatters]\\n'\n 'keys=simpleFormatter\\n'\n '\\n'\n '[logger_root]\\n'\n 'level=DEBUG\\n'\n 'handlers=consoleHandler\\n'\n '\\n'\n '[handler_consoleHandler]\\n'\n 'class=StreamHandler\\n'\n 'level=DEBUG\\n'\n 'formatter=simpleFormatter\\n'\n 'args=(sys.stdout,)\\n'\n '\\n'\n '[formatter_simpleFormatter]\\n'\n 'format=%(asctime)s - %(name)s - %(levelname)s - %(message)s\\n'\n 'datefmt=\\n')\n return template",
"def get_config_template(self) -> cconfig.Config:",
"def generate():",
"def _template_kwargs(*, logical_name: str, bucket: str, key: str) -> Dict[str, str]:\n if logical_name == \"ArtifactBuilder\":\n return dict(ArtifactBucketName=bucket, WorkersS3Key=key)\n elif logical_name == \"LayerBuilder\":\n return dict(ReplicationBucket=bucket, WorkersS3Key=key)\n else:\n raise ValueError(f\"Unknown logical name: {logical_name}\")",
"def user_create_yaml(self):\n pass"
] | [
"0.65473634",
"0.6186442",
"0.59212077",
"0.58994913",
"0.5786561",
"0.5749856",
"0.57343155",
"0.5694609",
"0.56501126",
"0.5589115",
"0.5586314",
"0.5547935",
"0.5522636",
"0.5514267",
"0.5514267",
"0.54982287",
"0.54740417",
"0.54516935",
"0.5447252",
"0.54235315",
"0.54164743",
"0.54114723",
"0.53889513",
"0.53844714",
"0.5379983",
"0.53570396",
"0.53538585",
"0.5350417",
"0.53376365",
"0.5326569",
"0.5305268",
"0.52842325",
"0.52777165",
"0.52731675",
"0.52325004",
"0.5227299",
"0.5226126",
"0.5212817",
"0.52066123",
"0.51969117",
"0.5195894",
"0.5190837",
"0.5183072",
"0.5179163",
"0.51769966",
"0.51699084",
"0.51517504",
"0.51351094",
"0.51253116",
"0.51199585",
"0.51158446",
"0.5108204",
"0.5094819",
"0.50909436",
"0.5081748",
"0.507889",
"0.50760865",
"0.50760597",
"0.50742936",
"0.507115",
"0.5066546",
"0.50575197",
"0.5050098",
"0.5049731",
"0.5047464",
"0.50323296",
"0.50309664",
"0.5030826",
"0.5027179",
"0.49914417",
"0.4987351",
"0.4985033",
"0.49828267",
"0.49783108",
"0.4973658",
"0.4973335",
"0.49704158",
"0.49700364",
"0.49672297",
"0.496351",
"0.49611413",
"0.4944378",
"0.49440974",
"0.49395502",
"0.49391687",
"0.49386987",
"0.49355796",
"0.49269646",
"0.49229938",
"0.49181214",
"0.49146456",
"0.4913518",
"0.4907462",
"0.49050704",
"0.48997733",
"0.4894565",
"0.48940098",
"0.48932528",
"0.48911938",
"0.48905587"
] | 0.6787827 | 0 |
Plots airspace into figure, reuse open figure if available | def plot(self):
    # determine color
    if self.as_type == 'A':
        color4plot = 'g'
    elif self.as_type == 'B':
        color4plot = 'b'
    else:
        color4plot = 'k'
    # plot
    plt.fill(self.lon_dec, self.lat_dec, facecolor=color4plot) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show_graphs ():\n plt.ylim = (0, 300)\n plt.xlim = (0, 300)\n #Set up lidar plot to figure 1\n lidar_plot = plt.figure (1)\n #Assign title\n plt.title ('Lidar data')\n #Assign data\n plt.imshow (lidar_clean)\n #Set up radar plot to figure 2\n radar_plot = plt.figure (2)\n #Assign title\n plt.title ('Radar data')\n #Assign data\n plt.imshow (radar_clean)\n #Show plots\n plt.show ()",
"def assemblePlot(self):\n self.clearPlot()\n self.axes = self.figure.add_subplot(111)\n\n # Reset handles\n self._fluxOverlayHandles = []\n self._magneticAxisHandle = None\n self._orbitHandles = []\n self._separatrixOverlayHandle = None\n self._wallCrossSectionOverlayHandle = None\n\n # Plot image\n self.plotEq()\n\n # Plot overlays\n self.plotOverlays()\n\n self.adjustAxes()",
"def setplot(plotdata):\n#-------------------------- \n\n\n from pyclaw.plotters import colormaps, geoplot\n\n plotdata.clearfigures() # clear any old figures,axes,items data\n\n def set_drytol(current_data):\n # The drytol parameter is used in masking land and water and\n # affects what color map is used for cells with small water depth h.\n # The cell will be plotted as dry if h < drytol.\n # The best value to use often depends on the application and can\n # be set here (measured in meters):\n current_data.user.drytol = 1.e-4\n\n\n plotdata.beforeframe = set_drytol\n\n # To plot gauge locations on pcolor or contour plot, use this as\n # an afteraxis function:\n\n def addgauges(current_data):\n from pyclaw.plotters import gaugetools\n gaugetools.plot_gauge_locations(current_data.plotdata, \\\n gaugenos='all', format_string='ko', add_labels=True)\n \n\n #-----------------------------------------\n # Figure for imshow plot\n #-----------------------------------------\n plotfigure = plotdata.new_plotfigure(name='imshow', figno=0)\n plotfigure.show = False\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes('imshow')\n plotaxes.title = 'Surface'\n plotaxes.scaled = True\n\n # Water\n plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')\n #plotitem.plot_var = geoplot.surface\n plotitem.plot_var = geoplot.surface_or_depth\n plotitem.imshow_cmap = geoplot.tsunami_colormap\n plotitem.imshow_cmin = -0.02\n plotitem.imshow_cmax = 0.02\n plotitem.add_colorbar = True\n plotitem.amr_gridlines_show = [0,0,0]\n plotitem.amr_gridedges_show = [1]\n\n # Land\n plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')\n plotitem.plot_var = geoplot.land\n plotitem.imshow_cmap = geoplot.land_colors\n plotitem.imshow_cmin = 0.0\n plotitem.imshow_cmax = 0.05\n plotitem.add_colorbar = False\n plotitem.amr_gridlines_show = [0,0,0]\n plotaxes.xlimits = 'auto'\n plotaxes.ylimits = 'auto'\n\n \n\n #-----------------------------------------\n # Figure for imshow plot\n #-----------------------------------------\n plotfigure = plotdata.new_plotfigure(name='Surface and Gauge 1', figno=20)\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes('imshow')\n plotaxes.axescmd = \"axes([.1,.5,.8,.4])\"\n plotaxes.title = 'Surface'\n plotaxes.scaled = True\n plotaxes.afteraxes = addgauges\n\n # Water\n plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')\n #plotitem.plot_var = geoplot.surface\n plotitem.plot_var = geoplot.surface_or_depth\n plotitem.imshow_cmap = geoplot.tsunami_colormap\n plotitem.imshow_cmin = -0.03\n plotitem.imshow_cmax = 0.03\n plotitem.add_colorbar = True\n plotitem.amr_gridlines_show = [0,0,0]\n plotitem.amr_gridedges_show = [1]\n\n # Land\n plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')\n plotitem.plot_var = geoplot.land\n plotitem.imshow_cmap = geoplot.land_colors\n plotitem.imshow_cmin = 0.0\n plotitem.imshow_cmax = 0.05\n plotitem.add_colorbar = False\n plotitem.amr_gridlines_show = [0,0,0]\n plotaxes.xlimits = 'auto'\n plotaxes.ylimits = 'auto'\n\n\n # Gauge trace:\n plotaxes = plotfigure.new_plotaxes()\n plotaxes.show = False\n plotaxes.axescmd = \"axes([.1,.1,.8,.3])\"\n plotaxes.xlimits = 'auto'\n plotaxes.ylimits = [-0.02, 0.05]\n plotaxes.title = 'Gauge 1'\n\n # Plot surface as blue curve:\n plotitem = plotaxes.new_plotitem(plot_type='1d_gauge_trace')\n plotitem.plot_var = 3\n plotitem.plotstyle = 'b-'\n plotitem.gaugeno = 1\n\n\n #-----------------------------------------\n # Figure for zoom\n 
#-----------------------------------------\n plotfigure = plotdata.new_plotfigure(name='Zoom', figno=10)\n #plotfigure.show = False\n plotfigure.kwargs = {'figsize':[7,7]}\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes('monai')\n #plotaxes.axescmd = 'axes([0.0,0.1,0.6,0.6])'\n plotaxes.title = 'Monai Valley'\n plotaxes.scaled = True\n #plotaxes.xlimits = [4.0, 5.2]\n #plotaxes.ylimits = [1.3, 2.5]\n plotaxes.xlimits = [4.7, 5.2]\n plotaxes.ylimits = [1.5, 2.2]\n\n # Water\n plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')\n #plotitem.plot_var = geoplot.surface\n plotitem.plot_var = geoplot.surface_or_depth\n plotitem.imshow_cmap = geoplot.tsunami_colormap\n plotitem.imshow_cmin = -0.02\n plotitem.imshow_cmax = 0.02\n plotitem.add_colorbar = False\n plotitem.amr_gridlines_show = [0]\n plotitem.amr_gridedges_show = [1]\n\n # Land\n plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')\n plotitem.plot_var = geoplot.land\n plotitem.imshow_cmap = geoplot.land_colors\n plotitem.imshow_cmin = 0.0\n plotitem.imshow_cmax = 0.05\n plotitem.add_colorbar = False\n plotitem.amr_gridlines_show = [0]\n\n # Add contour lines of bathymetry:\n plotitem = plotaxes.new_plotitem(plot_type='2d_contour')\n plotitem.plot_var = geoplot.topo\n from numpy import arange, linspace\n plotitem.contour_levels = arange(-0.02, 0., .0025)\n plotitem.amr_contour_colors = ['k'] # color on each level\n plotitem.kwargs = {'linestyles':'solid'}\n plotitem.amr_contour_show = [1] # show contours only on finest level\n plotitem.gridlines_show = 0\n plotitem.gridedges_show = 0\n plotitem.show = True\n \n # Add contour lines of topography:\n plotitem = plotaxes.new_plotitem(plot_type='2d_contour')\n plotitem.plot_var = geoplot.topo\n from numpy import arange, linspace\n plotitem.contour_levels = arange(0., .2, .01)\n plotitem.amr_contour_colors = ['w'] # color on each level\n plotitem.kwargs = {'linestyles':'solid'}\n plotitem.amr_contour_show = [1] # show contours only on finest level\n plotitem.gridlines_show = 0\n plotitem.gridedges_show = 0\n plotitem.show = True\n\n # Add dashed contour line for current shoreline\n plotitem = plotaxes.new_plotitem(plot_type='2d_contour')\n plotitem.plot_var = 0\n plotitem.contour_levels = [0.002]\n plotitem.amr_contour_colors = ['k'] # color on each level\n plotitem.kwargs = {'linestyles':'dashed','linewidths':2}\n plotitem.amr_contour_show = [1] # show contours only on finest level\n plotitem.gridlines_show = 0\n plotitem.gridedges_show = 0\n plotitem.show = True\n\n\n\n\n #-----------------------------------------\n # Figures for gauges\n #-----------------------------------------\n plotfigure = plotdata.new_plotfigure(name='Surface & topo', figno=300, \\\n type='each_gauge')\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes()\n plotaxes.xlimits = [0,25]\n plotaxes.ylimits = [-0.02, 0.05]\n plotaxes.title = 'Surface'\n\n # Plot surface as blue curve:\n plotitem = plotaxes.new_plotitem(plot_type='1d_plot')\n plotitem.plot_var = 3\n plotitem.plotstyle = 'b-'\n\n # Plot topo as green curve:\n plotitem = plotaxes.new_plotitem(plot_type='1d_plot')\n\n def gaugetopo(current_data):\n q = current_data.q\n h = q[:,0]\n eta = q[:,3]\n topo = eta - h\n return topo\n \n plotitem.plot_var = gaugetopo\n plotitem.clf_each_gauge = False\n plotitem.plotstyle = 'g-'\n def afteraxes(current_data):\n from pylab import plot, legend, loadtxt\n t = current_data.t\n plot(t, 0*t, 'k')\n gaugeno = current_data.gaugeno\n \n if gaugeno in [5,7,9]:\n col = 
(gaugeno-3)//2 # integer division: columns 1,2,3 of the lab data for gauges 5,7,9\n labgage = loadtxt('WaveGages.txt') # NOTE: 'labgage' was used but never loaded; filename is an assumption\n plot(labgage[:,0],0.01*labgage[:,col],'r')\n legend(('GeoClaw','topography','sea level','lab data'),loc='upper left')\n else:\n legend(('GeoClaw','topography','sea level'),loc='upper right')\n \n \n\n plotaxes.afteraxes = afteraxes\n\n\n #-----------------------------------------\n # Figure for grids alone\n #-----------------------------------------\n plotfigure = plotdata.new_plotfigure(name='grids', figno=2)\n plotfigure.show = False\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes()\n plotaxes.xlimits = [0,1]\n plotaxes.ylimits = [0,1]\n plotaxes.title = 'grids'\n plotaxes.scaled = True\n\n # Set up for item on these axes:\n plotitem = plotaxes.new_plotitem(plot_type='2d_grid')\n plotitem.amr_grid_bgcolor = ['#ffeeee', '#eeeeff', '#eeffee']\n plotitem.amr_gridlines_show = [1,1,0] \n plotitem.amr_gridedges_show = [1] \n\n #-----------------------------------------\n \n # Parameters used only when creating html and/or latex hardcopy\n # e.g., via pyclaw.plotters.frametools.printframes:\n\n plotdata.printfigs = True # print figures\n plotdata.print_format = 'png' # file format\n #plotdata.print_framenos = [4,6,8,10,12]\n plotdata.print_framenos = [5,7,9,11,13]\n plotdata.print_gaugenos = [0,5,7,9] # list of gauges to print\n plotdata.print_fignos = 'all' # list of figures to print\n plotdata.html = True # create html files of plots?\n plotdata.html_homelink = '../README.html' # pointer for top of index\n plotdata.latex = True # create latex file of plots?\n plotdata.latex_figsperline = 2 # layout of plots\n plotdata.latex_framesperline = 1 # layout of plots\n plotdata.latex_makepdf = False # also run pdflatex?\n\n return plotdata",
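The gaugetopo helper in the setplot above recovers topography as surface minus depth; a tiny NumPy sketch of the same arithmetic on made-up gauge data:

import numpy as np

# In this GeoClaw setup the gauge solution q stores depth h in column 0
# and surface elevation eta in column 3 (values below are made up).
q = np.array([[1.00, 0.0, 0.0, 0.02],
              [0.50, 0.0, 0.0, 0.01]])
h = q[:, 0]
eta = q[:, 3]
topo = eta - h        # topography/bathymetry at the gauge
print(topo)           # [-0.98 -0.49]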
"def run(self):\n # fill the x_values,y_values,z_values dictionaries\n if not self.__fillCoordinatesFromSource():\n self.raiseAWarning('Nothing to Plot Yet. Returning.')\n return\n\n self.counter += 1\n if self.counter > 1:\n self.actcm = None\n clusterDict = deepcopy(self.outStreamTypes)\n\n # start plotting.... loop over the plots that need to be included in this figure\n for pltIndex in range(len(self.outStreamTypes)):\n plotSettings = self.options['plotSettings']['plot'][pltIndex]\n if 'gridLocation' in plotSettings:\n x = None\n y = None\n if 'x' in plotSettings['gridLocation']:\n x = list(map(int, plotSettings['gridLocation']['x'].strip().split(' ')))\n else:\n x = None\n if 'y' in plotSettings['gridLocation'].keys():\n y = list(map(int, plotSettings['gridLocation']['y'].strip().split(' ')))\n else:\n y = None\n if pltIndex == 0:\n self.ax.remove() # remove axis so that there is not an extra axis on plot with subplots\n if (len(x) == 1 and len(y) == 1):\n if self.dim == 2:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0], y[0]])\n else:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0], y[0]], projection='3d')\n elif (len(x) == 1 and len(y) != 1):\n if self.dim == 2:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0], y[0]:y[-1]])\n else:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0], y[0]:y[-1]], projection='3d')\n elif (len(x) != 1 and len(y) == 1):\n if self.dim == 2:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0]:x[-1], y[0]])\n else:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0]:x[-1], y[0]], projection='3d')\n else:\n if self.dim == 2:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0]:x[-1], y[0]:y[-1]])\n else:\n self.ax = self.fig.add_subplot(self.gridSpace[x[0]:x[-1], y[0]:y[-1]], projection='3d')\n\n if 'gridSpace' in self.options['plotSettings']:\n self.ax.locator_params(axis='y', nbins=4)\n self.ax.locator_params(axis='x', nbins=2)\n if 'range' in plotSettings:\n axes_range = plotSettings['range']\n if 'ymin' in axes_range:\n self.ax.set_ylim(bottom=ast.literal_eval(axes_range['ymin']))\n if 'ymax' in axes_range:\n self.ax.set_ylim(top=ast.literal_eval(axes_range['ymax']))\n if 'xmin' in axes_range:\n self.ax.set_xlim(left=ast.literal_eval(axes_range['xmin']))\n if 'xmax' in axes_range:\n self.ax.set_xlim(right=ast.literal_eval(axes_range['xmax']))\n if self.dim == 3:\n if 'zmin' in axes_range.options['plotSettings']['plot'][pltIndex]:\n if 'zmax' not in axes_range.options['plotSettings']:\n self.raiseAWarning('zmin inputted but not zmax. zmin ignored! ')\n else:\n self.ax.set_zlim(bottom=ast.literal_eval(axes_range['zmin']), top=ast.literal_eval(self.options['plotSettings']['zmax']))\n if 'zmax' in axes_range:\n if 'zmin' not in axes_range:\n self.raiseAWarning('zmax inputted but not zmin. zmax ignored! 
')\n else:\n self.ax.set_zlim(bottom=ast.literal_eval(axes_range['zmin']), top=ast.literal_eval(axes_range['zmax']))\n if 'xlabel' not in plotSettings:\n self.ax.set_xlabel('x')\n else:\n self.ax.set_xlabel(plotSettings['xlabel'])\n if 'ylabel' not in plotSettings:\n self.ax.set_ylabel('y')\n else:\n self.ax.set_ylabel(plotSettings['ylabel'])\n if 'zlabel' in plotSettings:\n if self.dim == 2:\n self.raiseAWarning('zlabel keyword does not make sense in 2-D Plots!')\n elif self.dim == 3 and self.zCoordinates:\n self.ax.set_zlabel(plotSettings['zlabel'])\n elif self.dim == 3 and self.zCoordinates:\n self.ax.set_zlabel('z')\n else:\n if 'xlabel' not in self.options['plotSettings']:\n self.ax.set_xlabel('x')\n else:\n self.ax.set_xlabel(self.options['plotSettings']['xlabel'])\n if 'ylabel' not in self.options['plotSettings']:\n self.ax.set_ylabel('y')\n else:\n self.ax.set_ylabel(self.options['plotSettings']['ylabel'])\n if 'zlabel' in self.options['plotSettings']:\n if self.dim == 2:\n self.raiseAWarning('zlabel keyword does not make sense in 2-D Plots!')\n elif self.dim == 3 and self.zCoordinates:\n self.ax.set_zlabel(self.options['plotSettings']['zlabel'])\n elif self.dim == 3 and self.zCoordinates:\n self.ax.set_zlabel('z')\n\n if 'legend' in self.options['plotSettings']:\n if 'label' not in plotSettings.get('attributes', {}):\n if 'attributes' not in plotSettings:\n plotSettings['attributes'] = {}\n plotSettings['attributes']['label'] = self.outStreamTypes[pltIndex] + ' ' + str(pltIndex)\n #################\n # SCATTER PLOT #\n #################\n self.raiseADebug(f'creating plot {self.name}')\n if self.outStreamTypes[pltIndex] == 'scatter':\n if 's' not in plotSettings:\n plotSettings['s'] = '20'\n if 'c' not in plotSettings:\n plotSettings['c'] = 'b'\n if 'marker' not in plotSettings:\n plotSettings['marker'] = 'o'\n if 'alpha' not in plotSettings:\n plotSettings['alpha'] = 'None'\n if 'linewidths' not in plotSettings:\n plotSettings['linewidths'] = 'None'\n if self.colorMapCoordinates[pltIndex] is not None:\n # Find the max and min colormap values\n firstKey = utils.first(self.xValues[pltIndex].keys())\n vmin = np.amin(self.colorMapValues[pltIndex][firstKey])\n vmax = np.amax(self.colorMapValues[pltIndex][firstKey])\n for key in self.xValues[pltIndex]:\n vmin = min(vmin,np.amin(self.colorMapValues[pltIndex][key]))\n vmax = max(vmax,np.amax(self.colorMapValues[pltIndex][key]))\n plotSettings['norm'] = matplotlib.colors.Normalize(vmin,vmax)\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n for yIndex in range(len(self.yValues[pltIndex][key])):\n scatterPlotOptions = {'s': ast.literal_eval(plotSettings['s']),\n 'marker': (plotSettings['marker']),\n 'alpha': ast.literal_eval(plotSettings['alpha']),\n 'linewidths': ast.literal_eval(plotSettings['linewidths'])}\n if self.colorMapCoordinates[pltIndex] is not None:\n scatterPlotOptions['norm'] = plotSettings['norm']\n scatterPlotOptions.update(plotSettings.get('attributes', {}))\n if self.dim == 2:\n if self.colorMapCoordinates[pltIndex] is not None:\n scatterPlotOptions['c'] = self.colorMapValues[pltIndex][key][xIndex]\n scatterPlotOptions['cmap'] = matplotlib.cm.get_cmap(\"winter\")\n if self.actcm:\n first = False\n else:\n first = True\n if plotSettings['cmap'] == 'None':\n self.actPlot = self.ax.scatter(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n **scatterPlotOptions)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if 
first:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n try:\n self.actcm.draw_all()\n # this is not good, what exception will be thrown?\n except:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n scatterPlotOptions['cmap'] = plotSettings['cmap']\n self.actPlot = self.ax.scatter(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n **scatterPlotOptions)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m, ax=self.ax)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_clim(vmin = min(self.colorMapValues[pltIndex][key][-1]), vmax=max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n else:\n if 'color' not in scatterPlotOptions:\n scatterPlotOptions['c'] = plotSettings['c']\n self.actPlot = self.ax.scatter(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n **scatterPlotOptions)\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n if self.colorMapCoordinates[pltIndex] is not None:\n scatterPlotOptions['c'] = self.colorMapValues[pltIndex][key][zIndex]\n if self.actcm:\n first = False\n else:\n first = True\n if plotSettings['cmap'] == 'None':\n self.actPlot = self.ax.scatter(self.xValues[pltIndex][key][xIndex], self.yValues[pltIndex][key][yIndex], self.zValues[pltIndex][key][zIndex], **scatterPlotOptions)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n self.actcm.draw_all()\n else:\n scatterPlotOptions['cmap'] = plotSettings['cmap']\n self.actPlot = self.ax.scatter(self.xValues[pltIndex][key][xIndex], self.yValues[pltIndex][key][yIndex], self.zValues[pltIndex][key][zIndex], **scatterPlotOptions)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap = self.actPlot.cmap, norm = self.actPlot.norm)\n m.set_clim(vmin = min(self.colorMapValues[pltIndex][key][-1]), vmax=max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n else:\n if 'color' not in scatterPlotOptions:\n scatterPlotOptions['c'] = plotSettings['c']\n self.actPlot = self.ax.scatter(self.xValues[pltIndex][key][xIndex], self.yValues[pltIndex][key][yIndex], 
self.zValues[pltIndex][key][zIndex], **scatterPlotOptions)\n #################\n # LINE PLOT #\n #################\n elif self.outStreamTypes[pltIndex] == 'line':\n minV = 0\n maxV = 0\n # If the user does not define an appropriate cmap, then use matplotlib's default.\n if 'cmap' not in plotSettings or plotSettings['cmap'] not in matplotlib.cm.datad:\n plotSettings['cmap'] = None\n if bool(self.colorMapValues):\n for key in self.xValues[pltIndex]:\n minV = min(minV,self.colorMapValues[pltIndex][key][-1][-1])\n maxV = max(maxV,self.colorMapValues[pltIndex][key][-1][-1])\n cmap = matplotlib.cm.ScalarMappable(matplotlib.colors.Normalize(minV, maxV, True), plotSettings['cmap'])\n cmap.set_array([minV,maxV])\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n if self.colorMapCoordinates[pltIndex] is not None:\n plotSettings['interpPointsX'] = str(max(200, len(self.xValues[pltIndex][key][xIndex])))\n for yIndex in range(len(self.yValues[pltIndex][key])):\n if self.dim == 2:\n if self.yValues[pltIndex][key][yIndex].size < 2:\n return\n xi, yi = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex], self.yValues[pltIndex][key][yIndex], plotSettings, returnCoordinate=True)\n if self.colorMapCoordinates[pltIndex] is not None:\n self.ax.plot(xi, yi, c=cmap.cmap(self.colorMapValues[pltIndex][key][-1][-1]/(maxV-minV)))\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if self.actcm is None:\n self.actcm = self.fig.colorbar(cmap)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n self.actcm.draw_all()\n else:\n self.actPlot = self.ax.plot(xi, yi, **plotSettings.get('attributes', {}))\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n if self.zValues[pltIndex][key][zIndex].size <= 3:\n return\n if self.colorMapCoordinates[pltIndex] is not None:\n self.ax.plot(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n self.zValues[pltIndex][key][zIndex],\n c=cmap.cmap(self.colorMapValues[pltIndex][key][-1][-1]/(maxV-minV)))\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if self.actcm is None:\n self.actcm = self.fig.colorbar(cmap)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n self.actcm.draw_all()\n else:\n self.actPlot = self.ax.plot(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n self.zValues[pltIndex][key][zIndex],\n **plotSettings.get('attributes', {}))\n ##################\n # HISTOGRAM PLOT #\n ##################\n elif self.outStreamTypes[pltIndex] == 'histogram':\n if 'bins' not in plotSettings:\n if self.dim == 2:\n plotSettings['bins'] = '10'\n else:\n plotSettings['bins'] = '4'\n if 'normed' not in plotSettings:\n plotSettings['normed'] = 'False'\n if 'weights' not in plotSettings:\n plotSettings['weights'] = 'None'\n if 'cumulative' not in plotSettings:\n plotSettings['cumulative'] = 'False'\n if 'histtype' not in plotSettings:\n plotSettings['histtype'] = 'bar'\n if 'align' not in plotSettings:\n plotSettings['align'] = 'mid'\n if 'orientation' not in plotSettings:\n plotSettings['orientation'] = 'vertical'\n if 'rwidth' not in plotSettings:\n plotSettings['rwidth'] = 'None'\n if 'log' not in plotSettings:\n plotSettings['log'] = 'None'\n if 'color' not in plotSettings:\n plotSettings['color'] = 'b'\n if 'stacked' not in plotSettings:\n plotSettings['stacked'] = 'None'\n if 
self.sourceData[0].type.strip() == 'HistorySet':\n #####################################################################################################################################\n # @MANDD: This 'if' condition has been added in order to allow the user the correctly create an histogram out of an historySet #\n # If the histogram is created out of the input variables, then the plot has an identical meaning of the one generated by a pointSet #\n # However, if the histogram is created out of the output variables, then the plot consider only the last value of the array #\n #####################################################################################################################################\n data = {}\n data['x'] = np.empty(0)\n data['y'] = np.empty(0)\n for index in range(len(self.outStreamTypes)):\n for key in self.xValues[index]:\n data['x'] = np.append(data['x'], self.xValues[index][key][0][-1])\n if self.dim == 3:\n data['y'] = np.append(data['y'], self.yValues[index][key][0][-1])\n del self.xValues[index]\n self.xValues = {}\n self.xValues[index] = {}\n self.xValues[index][0] = []\n self.xValues[index][0].append(deepcopy(data['x']))\n if self.dim == 3:\n del self.yValues[index]\n self.yValues = {}\n self.yValues[index] ={ }\n self.yValues[index][0] = []\n self.yValues[index][0].append(deepcopy(data['y']))\n\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n try:\n colorss = ast.literal_eval(plotSettings['color'])\n # unknown what specific error is anticipated here, but I don't like a bare except...\n # ast.literal_eval can raise the exceptions listed below (see library docs):\n except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):\n colorss = plotSettings['color']\n if self.dim == 2:\n self.ax.hist(self.xValues[pltIndex][key][xIndex],\n bins=ast.literal_eval(plotSettings['bins']),\n density=ast.literal_eval(plotSettings['normed']),\n weights=ast.literal_eval(plotSettings['weights']),\n cumulative=ast.literal_eval(plotSettings['cumulative']),\n histtype=plotSettings['histtype'],\n align=plotSettings['align'],\n orientation=plotSettings['orientation'],\n rwidth=ast.literal_eval(plotSettings['rwidth']),\n log=ast.literal_eval(plotSettings['log']),\n color=colorss,\n stacked=ast.literal_eval(plotSettings['stacked']),\n **plotSettings.get('attributes', {}))\n else:\n for yIndex in range(len(self.yValues[pltIndex][key])):\n hist, xedges, yedges = np.histogram2d(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n bins=ast.literal_eval(plotSettings['bins']))\n elements = (len(xedges) - 1) * (len(yedges) - 1)\n if 'x_offset' in plotSettings:\n xoffset = float(plotSettings['x_offset'])\n else:\n xoffset = 0.25\n if 'y_offset' in plotSettings:\n yoffset = float(plotSettings['y_offset'])\n else:\n yoffset = 0.25\n if 'dx' in plotSettings:\n dxs = float(plotSettings['dx'])\n else:\n dxs = (self.xValues[pltIndex][key][xIndex].max() - self.xValues[pltIndex][key][xIndex].min()) / float(plotSettings['bins'])\n if 'dy' in plotSettings:\n dys = float(plotSettings['dy'])\n else:\n dys = (self.yValues[pltIndex][key][yIndex].max() - self.yValues[pltIndex][key][yIndex].min()) / float(plotSettings['bins'])\n xpos, ypos = np.meshgrid(xedges[:-1] + xoffset, yedges[:-1] + yoffset)\n self.actPlot = self.ax.bar3d(xpos.flatten(),\n ypos.flatten(),\n np.zeros(elements),\n dxs*np.ones_like(elements),\n dys*np.ones_like(elements),\n hist.flatten(),\n color=colorss,\n zsort='average',\n 
**plotSettings.get('attributes', {}))\n ##################\n # STEM PLOT #\n ##################\n elif self.outStreamTypes[pltIndex] == 'stem':\n if 'linefmt' not in plotSettings:\n plotSettings['linefmt'] = 'b-'\n if 'markerfmt' not in plotSettings:\n plotSettings['markerfmt'] = 'bo'\n if 'basefmt' not in plotSettings:\n plotSettings['basefmt'] = 'r-'\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n for yIndex in range(len(self.yValues[pltIndex][key])):\n if self.dim == 2:\n self.actPlot = self.ax.stem(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n linefmt=plotSettings['linefmt'],\n markerfmt=plotSettings['markerfmt'],\n basefmt=plotSettings['basefmt'], # FIX: was mistakenly reusing linefmt\n use_line_collection=True,\n **plotSettings.get('attributes', {}))\n else:\n # it is a basic stem plot constructed using a standard line plot. For now we do not use the previously defined keywords...\n for zIndex in range(len(self.zValues[pltIndex][key])):\n for xx, yy, zz in zip(self.xValues[pltIndex][key][xIndex], self.yValues[pltIndex][key][yIndex], self.zValues[pltIndex][key][zIndex]):\n self.ax.plot([xx, xx], [yy, yy], [0, zz], '-')\n ##################\n # STEP PLOT #\n ##################\n elif self.outStreamTypes[pltIndex] == 'step':\n if self.dim == 2:\n if 'where' not in plotSettings:\n plotSettings['where'] = 'mid'\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n if self.xValues[pltIndex][key][xIndex].size < 2:\n xi = self.xValues[pltIndex][key][xIndex]\n else:\n xi = np.linspace(self.xValues[pltIndex][key][xIndex].min(), self.xValues[pltIndex][key][xIndex].max(), ast.literal_eval(plotSettings['interpPointsX']))\n for yIndex in range(len(self.yValues[pltIndex][key])):\n if self.yValues[pltIndex][key][yIndex].size <= 3:\n return\n yi = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex], self.yValues[pltIndex][key][yIndex], plotSettings)\n self.actPlot = self.ax.step(xi, yi, where=plotSettings['where'], **plotSettings.get('attributes', {}))\n else:\n self.raiseAWarning('step Plot not available in 3D')\n return\n ########################\n # PSEUDOCOLOR PLOT #\n ########################\n elif self.outStreamTypes[pltIndex] == 'pseudocolor':\n if self.dim == 2:\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n # Hopefully, x,y, and z are all the same length, so checking this\n # here should be good enough.\n # The problem is you cannot interpolate any amount of space if\n # you only have a single data point.\n if self.xValues[pltIndex][key][xIndex].size == 1:\n self.raiseAWarning('Nothing to Plot Yet. Continuing to next plot.')\n continue\n for yIndex in range(len(self.yValues[pltIndex][key])):\n if not self.colorMapCoordinates:\n self.raiseAMessage('pseudocolor Plot needs coordinates for color map... 
Returning without plotting')\n return\n for zIndex in range(len(self.colorMapValues[pltIndex][key])):\n if self.colorMapValues[pltIndex][key][zIndex].size <= 3:\n return\n xig, yig, Ci = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.colorMapValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n if plotSettings['cmap'] == 'None':\n self.actPlot = self.ax.pcolormesh(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n **plotSettings.get('attributes', {}))\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n else:\n self.actPlot = self.ax.pcolormesh(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n cmap=matplotlib.cm.get_cmap(name = plotSettings['cmap']),\n **plotSettings.get('attributes', {}))\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(ma.masked_where(np.isnan(Ci), Ci))\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n actcm = self.fig.colorbar(m)\n actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n self.raiseAWarning('pseudocolor Plot is considered a 2D plot, not a 3D!')\n return\n ########################\n # SURFACE PLOT #\n ########################\n elif self.outStreamTypes[pltIndex] == 'surface':\n if self.dim == 2:\n self.raiseAWarning('surface Plot is NOT available for 2D plots, IT IS A 3D!')\n return\n else:\n if 'rstride' not in plotSettings:\n plotSettings['rstride'] = '1'\n if 'cstride' not in plotSettings:\n plotSettings['cstride'] = '1'\n if 'antialiased' not in plotSettings:\n plotSettings['antialiased'] = 'False'\n if 'linewidth' not in plotSettings:\n plotSettings['linewidth'] = '0'\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n # Hopefully, x,y, and z are all the same length, so checking this\n # here should be good enough.\n # The problem is you cannot interpolate any amount of space if\n # you only have a single data point.\n if self.xValues[pltIndex][key][xIndex].size == 1:\n self.raiseAWarning('Nothing to Plot Yet. 
Continuing to next plot.')\n continue\n for yIndex in range(len(self.yValues[pltIndex][key])):\n for zIndex in range(len(self.zValues[pltIndex][key])):\n if self.zValues[pltIndex][key][zIndex].size <= 3:\n return\n if self.colorMapCoordinates[pltIndex] is not None:\n xig, yig, Ci = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.colorMapValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n xig, yig, zi = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.zValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n if self.colorMapCoordinates[pltIndex] is not None:\n if self.actcm:\n first = False\n else:\n first = True\n if plotSettings['cmap'] == 'None':\n plotSettings['cmap'] = 'jet'\n self.actPlot = self.ax.plot_surface(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n rstride=ast.literal_eval(plotSettings['rstride']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n facecolors=matplotlib.cm.get_cmap(name=plotSettings['cmap'])(ma.masked_where(np.isnan(Ci), Ci)),\n cmap=matplotlib.cm.get_cmap(name = plotSettings['cmap']),\n linewidth=ast.literal_eval(plotSettings['linewidth']),\n antialiased=ast.literal_eval(plotSettings['antialiased']),\n **plotSettings.get('attributes', {}))\n if first:\n self.actPlot.cmap = matplotlib.cm.get_cmap(name=plotSettings['cmap'])\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n m = matplotlib.cm.ScalarMappable(cmap = self.actPlot.cmap, norm = self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_clim(vmin=min(self.colorMapValues[pltIndex][key][-1]), vmax=max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n else:\n if plotSettings['cmap'] == 'None':\n self.actPlot = self.ax.plot_surface(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n rstride=ast.literal_eval(plotSettings['rstride']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n linewidth=ast.literal_eval(plotSettings['linewidth']),\n antialiased=ast.literal_eval(plotSettings['antialiased']),\n **plotSettings.get('attributes', {}))\n if 'color' in plotSettings.get('attributes', {}):\n self.actPlot.set_color = plotSettings.get('attributes', {})['color']\n else:\n self.actPlot.set_color = 'blue'\n else:\n self.actPlot = self.ax.plot_surface(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n rstride=ast.literal_eval(plotSettings['rstride']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n cmap=matplotlib.cm.get_cmap(name = plotSettings['cmap']),\n linewidth=ast.literal_eval(plotSettings['linewidth']),\n antialiased=ast.literal_eval(plotSettings['antialiased']),\n **plotSettings.get('attributes', {}))\n ########################\n # TRI-SURFACE PLOT #\n ########################\n elif self.outStreamTypes[pltIndex] == 'tri-surface':\n if self.dim == 2:\n self.raiseAWarning('TRI-surface Plot is NOT available for 2D plots, it is 3D!')\n return\n else:\n if 'color' not in plotSettings:\n plotSettings['color'] = 'b'\n if 'shade' not in plotSettings:\n plotSettings['shade'] = 'False'\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n # Hopefully, x,y, and z are all the same length, 
so checking this\n # here should be good enough.\n # The problem is you cannot interpolate any amount of space if\n # you only have a single data point.\n if self.xValues[pltIndex][key][xIndex].size == 1:\n self.raiseAWarning('Nothing to Plot Yet. Continuing to next plot.')\n continue\n for yIndex in range(len(self.yValues[pltIndex][key])):\n for zIndex in range(len(self.zValues[pltIndex][key])):\n metric = (self.xValues[pltIndex][key][xIndex] ** 2 + self.yValues[pltIndex][key][yIndex] ** 2) ** 0.5\n metricIndeces = np.argsort(metric)\n xs = np.zeros(self.xValues[pltIndex][key][xIndex].shape)\n ys = np.zeros(self.yValues[pltIndex][key][yIndex].shape)\n zs = np.zeros(self.zValues[pltIndex][key][zIndex].shape)\n for sindex in range(len(metricIndeces)):\n xs[sindex] = self.xValues[pltIndex][key][xIndex][metricIndeces[sindex]]\n ys[sindex] = self.yValues[pltIndex][key][yIndex][metricIndeces[sindex]]\n zs[sindex] = self.zValues[pltIndex][key][zIndex][metricIndeces[sindex]]\n surfacePlotOptions = {'color': plotSettings['color'],\n 'shade': ast.literal_eval(plotSettings['shade'])}\n surfacePlotOptions.update(plotSettings.get('attributes', {}))\n if self.zValues[pltIndex][key][zIndex].size <= 3:\n return\n if self.colorMapCoordinates[pltIndex] is not None:\n if self.actcm:\n first = False\n else:\n first = True\n if plotSettings['cmap'] == 'None':\n plotSettings['cmap'] = 'jet'\n surfacePlotOptions['cmap'] = matplotlib.cm.get_cmap(name = plotSettings['cmap'])\n self.actPlot = self.ax.plot_trisurf(xs, ys, zs, **surfacePlotOptions)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n self.actPlot.cmap = matplotlib.cm.get_cmap(name=plotSettings['cmap'])\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_clim(vmin=min(self.colorMapValues[pltIndex][key][-1]), vmax=max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n else:\n if plotSettings['cmap'] != 'None':\n surfacePlotOptions[\"cmap\"] = matplotlib.cm.get_cmap(name=plotSettings['cmap'])\n self.actPlot = self.ax.plot_trisurf(xs, ys, zs, **surfacePlotOptions)\n ########################\n # WIREFRAME PLOT #\n ########################\n elif self.outStreamTypes[pltIndex] == 'wireframe':\n if self.dim == 2:\n self.raiseAWarning('wireframe Plot is NOT available for 2D plots, IT IS A 3D!')\n return\n else:\n if 'rstride' not in plotSettings:\n plotSettings['rstride'] = '1'\n if 'cstride' not in plotSettings:\n plotSettings['cstride'] = '1'\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n # Hopefully, x,y, and z are all the same length, so checking this\n # here should be good enough.\n # The problem is you cannot interpolate any amount of space if\n # you only have a single data point.\n if self.xValues[pltIndex][key][xIndex].size == 1:\n self.raiseAWarning('Nothing to Plot Yet. 
Continuing to next plot.')\n continue\n for yIndex in range(len(self.yValues[pltIndex][key])):\n for zIndex in range(len(self.zValues[pltIndex][key])):\n if self.zValues[pltIndex][key][zIndex].size <= 3:\n return\n if self.colorMapCoordinates[pltIndex] is not None:\n xig, yig, Ci = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.colorMapValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n xig, yig, zi = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.zValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n if self.colorMapCoordinates[pltIndex] is not None:\n self.raiseAWarning(f'Currently, ax.plot_wireframe() in MatPlotLib version: {matplotlib.__version__} does not support a colormap! Wireframe plotted on a surface plot...')\n if self.actcm:\n first = False\n else:\n first = True\n if plotSettings['cmap'] == 'None':\n plotSettings['cmap'] = 'jet'\n self.actPlot = self.ax.plot_wireframe(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n rstride=ast.literal_eval(plotSettings['rstride']),\n cmap=matplotlib.cm.get_cmap(name = plotSettings['cmap']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n **plotSettings.get('attributes', {}))\n self.actPlot = self.ax.plot_surface(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n alpha=0.4,\n rstride=ast.literal_eval(plotSettings['rstride']),\n cmap=matplotlib.cm.get_cmap(name=plotSettings['cmap']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n **plotSettings.get('attributes', {}))\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_array(self.colorMapValues[pltIndex][key])\n self.actcm = self.fig.colorbar(m)\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap=self.actPlot.cmap, norm=self.actPlot.norm)\n m.set_clim(vmin=min(self.colorMapValues[pltIndex][key][-1]), vmax=max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n else:\n if plotSettings['cmap'] == 'None':\n self.actPlot = self.ax.plot_wireframe(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n rstride=ast.literal_eval(plotSettings['rstride']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n **plotSettings.get('attributes', {}))\n if 'color' in plotSettings.get('attributes', {}):\n self.actPlot.set_color = plotSettings.get('attributes', {})['color']\n else:\n self.actPlot.set_color = 'blue'\n else:\n self.actPlot = self.ax.plot_wireframe(xig,\n yig,\n ma.masked_where(np.isnan(zi), zi),\n rstride=ast.literal_eval(plotSettings['rstride']),\n cstride=ast.literal_eval(plotSettings['cstride']),\n **plotSettings.get('attributes', {}))\n ########################\n # CONTOUR PLOT #\n ########################\n elif self.outStreamTypes[pltIndex] == 'contour' or self.outStreamTypes[pltIndex] == 'filledContour':\n if self.dim == 2:\n if 'numberBins' in plotSettings:\n nbins = int(plotSettings['numberBins'])\n else:\n nbins = 5\n for key in self.xValues[pltIndex]:\n if not self.colorMapCoordinates:\n self.raiseAWarning(self.outStreamTypes[pltIndex] + ' Plot needs coordinates for color map... 
Returning without plotting')\n return\n for xIndex in range(len(self.xValues[pltIndex][key])):\n # Hopefully, x,y, and z are all the same length, so checking this\n # here should be good enough.\n # The problem is you cannot interpolate any amount of space if\n # you only have a single data point.\n if self.xValues[pltIndex][key][xIndex].size == 1:\n self.raiseAWarning('Nothing to Plot Yet. Continuing to next plot.')\n continue\n for yIndex in range(len(self.yValues[pltIndex][key])):\n for zIndex in range(len(self.colorMapValues[pltIndex][key])):\n if self.actcm:\n first = False\n else:\n first = True\n xig, yig, Ci = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.colorMapValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n if self.outStreamTypes[pltIndex] == 'contour':\n if plotSettings['cmap'] == 'None':\n if 'color' in plotSettings.get('attributes', {}):\n color = plotSettings.get('attributes', {})['color']\n else:\n color = 'blue'\n self.actPlot = self.ax.contour(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n nbins,\n colors=color,\n **plotSettings.get('attributes', {}))\n else:\n self.actPlot = self.ax.contour(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n nbins,\n **plotSettings.get('attributes', {}))\n else:\n if plotSettings['cmap'] == 'None':\n plotSettings['cmap'] = 'jet'\n self.actPlot = self.ax.contourf(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n nbins,\n **plotSettings.get('attributes', {}))\n self.ax.clabel(self.actPlot, inline=1, fontsize=10)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n self.actcm = self.fig.colorbar(self.actPlot, shrink=0.8, extend='both')\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap = self.actPlot.cmap, norm = self.actPlot.norm)\n m.set_clim(vmin = min(self.colorMapValues[pltIndex][key][-1]), vmax = max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n else:\n self.raiseAWarning('contour/filledContour is a 2-D plot, where x,y are the surface coordinates and colorMap vector is the array to visualize!\\n contour3D/filledContour3D are 3-D! ')\n return\n # These should be combined: ^^^ & vvv\n elif self.outStreamTypes[pltIndex] == 'contour3D' or self.outStreamTypes[pltIndex] == 'filledContour3D':\n if self.dim == 2:\n self.raiseAWarning('contour3D/filledContour3D Plot is NOT available for 2D plots, IT IS A 2D! Check \"contour/filledContour\"!')\n return\n else:\n if 'numberBins' in plotSettings:\n nbins = int(plotSettings['numberBins'])\n else:\n nbins = 5\n if 'extend3D' in plotSettings:\n ext3D = bool(plotSettings['extend3D'])\n else:\n ext3D = False\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n # Hopefully, x,y, and z are all the same length, so checking this\n # here should be good enough.\n # The problem is you cannot interpolate any amount of space if\n # you only have a single data point.\n if self.xValues[pltIndex][key][xIndex].size == 1:\n self.raiseAWarning('Nothing to Plot Yet. 
Continuing to next plot.')\n continue\n for yIndex in range(len(self.yValues[pltIndex][key])):\n for zIndex in range(len(self.colorMapValues[pltIndex][key])):\n if self.actcm:\n first = False\n else:\n first = True\n xig, yig, Ci = mathUtils.interpolateFunction(self.xValues[pltIndex][key][xIndex],\n self.yValues[pltIndex][key][yIndex],\n plotSettings,\n z=self.colorMapValues[pltIndex][key][zIndex],\n returnCoordinate=True)\n if self.outStreamTypes[pltIndex] == 'contour3D':\n if plotSettings['cmap'] == 'None':\n if 'color' in plotSettings.get('attributes', {}):\n color = plotSettings.get('attributes', {})['color']\n else:\n color = 'blue'\n self.actPlot = self.ax.contour3D(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n nbins,\n colors=color,\n extend3d=ext3D,\n **plotSettings.get('attributes', {}))\n else:\n self.actPlot = self.ax.contour3D(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n nbins,\n extend3d=ext3D,\n cmap=matplotlib.cm.get_cmap(name=plotSettings['cmap']),\n **plotSettings.get('attributes', {}))\n else:\n if plotSettings['cmap'] == 'None':\n plotSettings['cmap'] = 'jet'\n self.actPlot = self.ax.contourf3D(xig,\n yig,\n ma.masked_where(np.isnan(Ci), Ci),\n nbins,\n cmap=matplotlib.cm.get_cmap(name=plotSettings['cmap']),\n **plotSettings.get('attributes', {}))\n self.ax.clabel(self.actPlot, inline=1, fontsize=10)\n if 'colorbar' not in self.options or self.options['colorbar']['colorbar'] != 'off':\n if first:\n self.actcm = self.fig.colorbar(self.actPlot, shrink = 0.8, extend = 'both')\n self.actcm.set_label(self.colorMapCoordinates[pltIndex][0].split('|')[-1].replace(')', ''))\n else:\n m = matplotlib.cm.ScalarMappable(cmap = self.actPlot.cmap, norm = self.actPlot.norm)\n m.set_clim(vmin = min(self.colorMapValues[pltIndex][key][-1]), vmax = max(self.colorMapValues[pltIndex][key][-1]))\n self.actcm.draw_all()\n ########################\n # DataMining PLOT #\n ########################\n elif self.outStreamTypes[pltIndex] == 'dataMining':\n colors = cycle(['#88CCEE', '#DDCC77', '#AA4499', '#117733', '#332288', '#999933', '#44AA99', '#882255', '#CC6677', '#CD6677', '#DC6877', '#886677', '#AA6677', '#556677', '#CD7865'])\n if 's' not in plotSettings:\n plotSettings['s'] = '20'\n if 'c' not in plotSettings:\n plotSettings['c'] = 'b'\n if 'marker' not in plotSettings:\n plotSettings['marker'] = 'o'\n if 'alpha' not in plotSettings:\n plotSettings['alpha'] = 'None'\n if 'linewidths' not in plotSettings:\n plotSettings['linewidths'] = 'None'\n clusterDict[pltIndex] = {}\n for key in self.xValues[pltIndex]:\n for xIndex in range(len(self.xValues[pltIndex][key])):\n for yIndex in range(len(self.yValues[pltIndex][key])):\n dataMiningPlotOptions = {'s': ast.literal_eval(plotSettings['s']),\n 'marker': (plotSettings['marker']),\n 'alpha': ast.literal_eval(plotSettings['alpha']),\n 'linewidths': ast.literal_eval(plotSettings['linewidths'])}\n if self.colorMapCoordinates[pltIndex] is not None:\n self.raiseAWarning('ColorMap values supplied, however DataMining plots do not use colorMap from input.')\n if plotSettings['cmap'] == 'None':\n self.raiseAWarning('ColorSet supplied, however DataMining plots do not use color set from input.')\n if 'cluster' == plotSettings['SKLtype']:\n # TODO: include the cluster Centers to the plot\n if 'noClusters' in plotSettings.get('attributes', {}):\n clusterDict[pltIndex]['noClusters'] = int(plotSettings.get('attributes', {})['noClusters'])\n plotSettings.get('attributes', {}).pop('noClusters')\n else:\n clusterDict[pltIndex]['noClusters'] = 
np.amax(self.clusterValues[pltIndex][1][0]) + 1\n dataMiningPlotOptions.update(plotSettings.get('attributes', {}))\n if self.dim == 2:\n clusterDict[pltIndex]['clusterValues'] = np.zeros(shape=(len(self.xValues[pltIndex][key][xIndex]), 2))\n else:\n clusterDict[pltIndex]['clusterValues'] = np.zeros(shape=(len(self.xValues[pltIndex][key][xIndex]), 3))\n clusterDict[pltIndex]['clusterValues'][:, 0] = self.xValues[pltIndex][key][xIndex]\n clusterDict[pltIndex]['clusterValues'][:, 1] = self.yValues[pltIndex][key][yIndex]\n if self.dim == 2:\n for k, col in zip(range(int(clusterDict[pltIndex]['noClusters'])), colors):\n myMembers = self.clusterValues[pltIndex][1][0] == k\n self.actPlot = self.ax.scatter(clusterDict[pltIndex]['clusterValues'][myMembers, 0],\n clusterDict[pltIndex]['clusterValues'][myMembers, 1],\n color=col,\n **dataMiningPlotOptions)\n\n # Handle all of the outlying data\n myMembers = self.clusterValues[pltIndex][1][0] == -1\n # resize the points\n dataMiningPlotOptions['s'] /= 2\n # and hollow out their markers\n if 'facecolors' in dataMiningPlotOptions:\n faceColors = dataMiningPlotOptions['facecolors']\n else:\n faceColors = None\n dataMiningPlotOptions['facecolors'] = 'none'\n\n self.actPlot = self.ax.scatter(clusterDict[pltIndex]['clusterValues'][myMembers, 0],\n clusterDict[pltIndex]['clusterValues'][myMembers, 1],\n color='#000000',\n **dataMiningPlotOptions)\n\n # Restore the plot options to their original values\n dataMiningPlotOptions['s'] *= 2\n if faceColors is not None:\n dataMiningPlotOptions['facecolors'] = faceColors\n else:\n del dataMiningPlotOptions['facecolors']\n\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n clusterDict[pltIndex]['clusterValues'][:, 2] = self.zValues[pltIndex][key][zIndex]\n for k, col in zip(range(int(clusterDict[pltIndex]['noClusters'])), colors):\n myMembers = self.clusterValues[pltIndex][1][0] == k\n self.actPlot = self.ax.scatter(clusterDict[pltIndex]['clusterValues'][myMembers, 0],\n clusterDict[pltIndex]['clusterValues'][myMembers, 1],\n clusterDict[pltIndex]['clusterValues'][myMembers, 2],\n color=col,\n **dataMiningPlotOptions)\n\n # Handle all of the outlying data\n myMembers = self.clusterValues[pltIndex][1][0] == -1\n # resize the points\n dataMiningPlotOptions['s'] /= 2\n # and hollow out their markers\n if 'facecolors' in dataMiningPlotOptions:\n faceColors = dataMiningPlotOptions['facecolors']\n else:\n faceColors = None\n dataMiningPlotOptions['facecolors'] = 'none'\n\n self.actPlot = self.ax.scatter(clusterDict[pltIndex]['clusterValues'][myMembers, 0],\n clusterDict[pltIndex]['clusterValues'][myMembers, 1],\n clusterDict[pltIndex]['clusterValues'][myMembers, 2],\n color='#000000',\n **dataMiningPlotOptions)\n\n # Restore the plot options to their original values\n dataMiningPlotOptions['s'] *= 2\n if faceColors is not None:\n dataMiningPlotOptions['facecolors'] = faceColors\n else:\n del dataMiningPlotOptions['facecolors']\n\n elif 'bicluster' == plotSettings['SKLtype']:\n self.raiseAnError(IOError, 'SKLType Bi-Cluster Plots are not implemented yet!..')\n elif 'mixture' == plotSettings['SKLtype']:\n if 'noMixtures' in plotSettings.get('attributes', {}):\n clusterDict[pltIndex]['noMixtures'] = int(plotSettings.get('attributes', {})['noMixtures'])\n plotSettings.get('attributes', {}).pop('noMixtures')\n else:\n clusterDict[pltIndex]['noMixtures'] = np.amax(self.mixtureValues[pltIndex][1][0]) + 1\n if self.dim == 3:\n self.raiseAnError(IOError, 'SKLType Mixture Plots are only available in 2-Dimensions')\n 
else:\n clusterDict[pltIndex]['mixtureValues'] = np.zeros(shape = (len(self.xValues[pltIndex][key][xIndex]), 2))\n clusterDict[pltIndex]['mixtureValues'][:, 0] = self.xValues[pltIndex][key][xIndex]\n clusterDict[pltIndex]['mixtureValues'][:, 1] = self.yValues[pltIndex][key][yIndex]\n if 'mixtureCovars' in plotSettings.get('attributes', {}):\n split = self.__splitVariableNames('mixtureCovars', (pltIndex, 0))\n # mixtureCovars = self.sourceData[pltIndex].getParam(split[1], split[2], nodeId = 'ending')\n plotSettings.get('attributes', {}).pop('mixtureCovars')\n # else:\n # mixtureCovars = None\n if 'mixtureMeans' in plotSettings.get('attributes', {}):\n split = self.__splitVariableNames('mixtureMeans', (pltIndex, 0))\n # mixtureMeans = self.sourceData[pltIndex].getParam(split[1], split[2], nodeId = 'ending')\n plotSettings.get('attributes', {}).pop('mixtureMeans')\n # else:\n # mixtureMeans = None\n # mixtureCovars.reshape(3, 4)\n # mixtureMeans.reshape(3, 4)\n # for i, (mean, covar, col) in enumerate(zip(mixtureMeans, mixtureCovars, colors)):\n for i, col in zip(range(clusterDict[pltIndex]['noMixtures']), colors):\n if not np.any(self.mixtureValues[pltIndex][1][0] == i):\n continue\n myMembers = self.mixtureValues[pltIndex][1][0] == i\n self.actPlot = self.ax.scatter(clusterDict[pltIndex]['mixtureValues'][myMembers, 0],\n clusterDict[pltIndex]['mixtureValues'][myMembers, 1],\n color=col,\n **dataMiningPlotOptions)\n elif 'manifold' == plotSettings['SKLtype']:\n if self.dim == 2:\n manifoldValues = np.zeros(shape=(len(self.xValues[pltIndex][key][xIndex]), 2))\n else:\n manifoldValues = np.zeros(shape=(len(self.xValues[pltIndex][key][xIndex]), 3))\n manifoldValues[:, 0] = self.xValues[pltIndex][key][xIndex]\n manifoldValues[:, 1] = self.yValues[pltIndex][key][yIndex]\n if 'clusterLabels' in plotSettings.get('attributes', {}):\n split = self.__splitVariableNames('clusterLabels', (pltIndex, 0))\n clusterDict[pltIndex]['clusterLabels'] = self.sourceData[pltIndex].getParam(split[1], split[2], nodeId = 'ending')\n plotSettings.get('attributes', {}).pop('clusterLabels')\n else:\n clusterDict[pltIndex]['clusterLabels'] = None\n if 'noClusters' in plotSettings.get('attributes', {}):\n clusterDict[pltIndex]['noClusters'] = int(plotSettings.get('attributes', {})['noClusters'])\n plotSettings.get('attributes', {}).pop('noClusters')\n else:\n clusterDict[pltIndex]['noClusters'] = np.amax(self.clusterValues[pltIndex][1][0]) + 1\n if self.clusterValues[pltIndex][1][0] is not None:\n if self.dim == 2:\n for k, col in zip(range(clusterDict[pltIndex]['noClusters']), colors):\n myMembers = self.clusterValues[pltIndex][1][0] == k\n self.actPlot = self.ax.scatter(manifoldValues[myMembers, 0],\n manifoldValues[myMembers, 1],\n color=col,\n **dataMiningPlotOptions)\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n manifoldValues[:, 2] = self.zValues[pltIndex][key][zIndex]\n for k, col in zip(range(clusterDict[pltIndex]['noClusters']), colors):\n myMembers = self.clusterValues[pltIndex][1][0] == k\n self.actPlot = self.ax.scatter(manifoldValues[myMembers, 0],\n manifoldValues[myMembers, 1],\n manifoldValues[myMembers, 2],\n color=col,\n **dataMiningPlotOptions)\n else:\n if self.dim == 2:\n self.actPlot = self.ax.scatter(manifoldValues[:, 0],\n manifoldValues[:, 1],\n **dataMiningPlotOptions)\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n manifoldValues[:, 2] = self.zValues[pltIndex][key][zIndex]\n self.actPlot = self.ax.scatter(manifoldValues[:, 0],\n manifoldValues[:, 1],\n 
manifoldValues[:, 2],\n **dataMiningPlotOptions)\n elif 'decomposition' == plotSettings['SKLtype']:\n if self.dim == 2:\n decompositionValues = np.zeros(shape = (len(self.xValues[pltIndex][key][xIndex]), 2))\n else:\n decompositionValues = np.zeros(shape = (len(self.xValues[pltIndex][key][xIndex]), 3))\n decompositionValues[:, 0] = self.xValues[pltIndex][key][xIndex]\n decompositionValues[:, 1] = self.yValues[pltIndex][key][yIndex]\n if 'noClusters' in plotSettings.get('attributes', {}):\n clusterDict[pltIndex]['noClusters'] = int(plotSettings.get('attributes', {})['noClusters'])\n plotSettings.get('attributes', {}).pop('noClusters')\n else:\n clusterDict[pltIndex]['noClusters'] = np.amax(self.clusterValues[pltIndex][1][0]) + 1\n if self.clusterValues[pltIndex][1][0] is not None:\n if self.dim == 2:\n for k, col in zip(range(clusterDict[pltIndex]['noClusters']), colors):\n myMembers = self.clusterValues[pltIndex][1][0] == k\n self.actPlot = self.ax.scatter(decompositionValues[myMembers, 0],\n decompositionValues[myMembers, 1],\n color=col,\n **dataMiningPlotOptions)\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n decompositionValues[:, 2] = self.zValues[pltIndex][key][zIndex]\n for k, col in zip(range(clusterDict[pltIndex]['noClusters']), colors):\n myMembers = self.clusterValues[pltIndex][1][0] == k\n self.actPlot = self.ax.scatter(decompositionValues[myMembers, 0],\n decompositionValues[myMembers, 1],\n decompositionValues[myMembers, 2],\n color=col,\n **dataMiningPlotOptions)\n else:\n # no ClusterLabels\n if self.dim == 2:\n self.actPlot = self.ax.scatter(decompositionValues[:, 0],\n decompositionValues[:, 1],\n **dataMiningPlotOptions)\n else:\n for zIndex in range(len(self.zValues[pltIndex][key])):\n decompositionValues[:, 2] = self.zValues[pltIndex][key][zIndex]\n self.actPlot = self.ax.scatter(decompositionValues[:, 0],\n decompositionValues[:, 1],\n decompositionValues[:, 2],\n **dataMiningPlotOptions)\n else:\n # Let's try to \"write\" the code for the plot on the fly\n self.raiseAWarning('Trying to create a non-predefined plot of type ' + self.outStreamTypes[pltIndex] + '. If this fails, please refer to the and/or the related matplotlib method specification.')\n kwargs = {}\n for kk in plotSettings:\n if kk != 'attributes' and kk != self.outStreamTypes[pltIndex]:\n try:\n kwargs[kk] = ast.literal_eval(plotSettings[kk])\n except ValueError:\n kwargs[kk] = plotSettings[kk]\n try:\n if self.dim == 2:\n customFunctionCall = getattr(self.ax, self.outStreamTypes[pltIndex])\n else:\n customFunctionCall = getattr(self.ax, self.outStreamTypes[pltIndex])\n self.actPlot = customFunctionCall(**kwargs)\n except AttributeError as ae:\n self.raiseAnError(RuntimeError, '<' + str(ae) + '> -> in execution custom plot \"' + self.outStreamTypes[pltIndex] + '\" in Plot ' + self.name + '.\\nSTREAM MANAGER: ERROR -> command has been called in the following way: ' + 'ax.' 
+ self.outStreamTypes[pltIndex])\n\n if 'legend' in self.options['plotSettings']:\n self.fig.legend(**self.options['plotSettings']['legend'])\n\n # SHOW THE PICTURE\n self.__executeActions()\n self.fig.canvas.draw_idle()\n\n if 'screen' in self.destinations and display:\n def handle_close(event):\n \"\"\"\n This method is aimed to handle the closing of figures (overall when in interactive mode)\n @ In, event, instance, the event to close\n @ Out, None\n \"\"\"\n self.fig.canvas.stop_event_loop()\n self.raiseAMessage('Closed Figure')\n self.fig.canvas.mpl_connect('close_event', handle_close)\n # self.plt.pause(1e-6)\n # The following code is extracted from pyplot.pause without actually\n # needing to force the code to sleep, according to MPL's documentation,\n # this feature is experimental, hopefully by not calling the pause\n # function, we can obtain consistent results.\n # We are skipping a few of the sanity checks done in that function,\n # since we are sure we have an interactive backend and access to the\n # correct type of canvas and figure.\n self.fig.canvas.draw()\n # If your graphs are unresponsive to user input, you may want to consider\n # adjusting this timeout, to allow more time for the input to be handled.\n self.fig.canvas.start_event_loop(1e-3)\n\n # self.fig.canvas.flush_events()\n\n for fileType in self.destinations:\n if fileType == 'screen':\n continue\n\n if not self.overwrite:\n prefix = str(self.counter) + '-'\n else:\n prefix = ''\n\n if len(self.filename) > 0:\n name = self.filename\n else:\n name = prefix + self.name + '_' + str(self.outStreamTypes).replace(\"'\", \"\").replace(\"[\", \"\").replace(\"]\", \"\").replace(\",\", \"-\").replace(\" \", \"\")\n\n if self.subDirectory is not None:\n name = os.path.join(self.subDirectory,name)\n\n self.fig.savefig(name + '.' + fileType, format=fileType)\n\n if 'screen' not in self.destinations:\n plt.close(fig=self.fig)\n\n gc.collect()",
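The colorbar bookkeeping in run() above keeps wrapping the current cmap/norm in a ScalarMappable so a single colorbar can serve many scatter calls; a stripped-down sketch of that idiom on synthetic data:

import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
vals = rng.random(50)

fig, ax = plt.subplots()
norm = mpl.colors.Normalize(vals.min(), vals.max())   # shared normalization
ax.scatter(rng.random(50), rng.random(50), c=vals, cmap='winter', norm=norm)

# A standalone mappable decouples the colorbar from any one scatter artist,
# so later plot calls can reuse the same bar.
m = mpl.cm.ScalarMappable(cmap='winter', norm=norm)
m.set_array(vals)
fig.colorbar(m, ax=ax).set_label('colorMap variable')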
"def PlotAirplane():\n airplane = vtkInterface.PolyData(planefile)\n airplane.Plot()",
"def _update_plot(self) -> None:\n\n # Check if plotting is active\n if self._fig is None:\n return None\n LOG.debug(\"Updating plot.\")\n\n # Extract glaciated area\n hs_back = np.ma.masked_where(\n self.h <= 1,\n hillshade(\n self.ele, self.PLOT_HILLSHADE_AZIMUTH, self.PLOT_HILLSHADE_ALTITUDE\n ),\n )\n\n # Clear plot and draw axes\n self._fig.clear()\n ax = plt.subplot(121, facecolor=\"black\")\n ax.tick_params(axis=\"x\", colors=\"w\")\n ax.tick_params(axis=\"y\", colors=\"w\")\n ax.set(xlabel=\"X-coordinate [m]\", ylabel=\"Y-coordinate [m]\")\n ax.xaxis.label.set_color(\"w\")\n ax.yaxis.label.set_color(\"w\")\n title_text = f\"Year: {str(self.i)} ELA: {str(int(self.ela))} m.a.s.l.\"\n ax.set_title(title_text, color=\"white\", size=18)\n\n # Draw new image layers\n plt.imshow(self.hs, vmin=90, vmax=345, cmap=\"copper\", extent=self.extent)\n plt.imshow(255 - hs_back, vmin=1, vmax=150, cmap=\"Greys\", extent=self.extent)\n\n # Mass balance\n ax1 = plt.subplot(222, facecolor=\"black\")\n ax1.plot(self.mass_balance, color=\"w\")\n ax1.plot(self.mass_balance_trend, color=\"r\")\n ax1.set(ylabel=\"Mass balance [m]\")\n ax1.yaxis.label.set_color(\"w\")\n plt.setp(ax1.get_xticklabels(), visible=False)\n ax1.tick_params(axis=\"y\", colors=\"w\")\n ax1.set_title(f\"Gradient: {str(self.m)} m/m\", color=\"white\", size=18)\n\n # Plot mean thickness\n ax2 = plt.subplot(224, sharex=ax1, facecolor=\"black\")\n ax2.plot(self.mass, color=\"w\")\n ax2.set(xlabel=\"Year [a]\", ylabel=\"Mean thickness [m]\")\n ax2.xaxis.label.set_color(\"w\")\n ax2.yaxis.label.set_color(\"w\")\n ax2.tick_params(axis=\"x\", colors=\"w\")\n ax2.tick_params(axis=\"y\", colors=\"w\")\n\n # Draw new plot\n self._fig.canvas.draw()\n plt.pause(0.05)",
"def plot(data, interactive=False):\n if interactive:\n plt.ion()\n fig = plt.figure()\n fig.canvas.draw()\n image = call_imshow(data)\n else:\n fig = plt.figure()\n image = call_imshow(data)\n plt.show()\n return fig, image",
"def _doPlots(self):\n ax = self.sp.ax\n if ax: ax.helper.doPlots()\n # Setting calls now use new local options\n self.opts.newLocal()",
"def plot(self):\n self.plotsite()\n self.plotbond()\n plt.show()",
"def figure4():\n\n plot_settings = {'y_limits': [-80, -50],\n 'x_limits': None,\n 'y_ticks': [-80, -70, -60, -50],\n 'locator_size': 5,\n 'y_label': 'Voltage (mV)',\n 'x_ticks': [],\n 'scale_size': 20,\n 'x_label': \"\",\n 'scale_loc': 4,\n 'figure_name': 'figure_4',\n 'legend': ['control', 'apamin'],\n 'legend_size': 8,\n 'y_on': True}\n line_styles = ['-', 'dotted']\n\n plt.figure(figsize=(5, 3), dpi=96)\n\n plt.subplot(2, 1, 1) # Generate figure 1 (top)\n for ix, g_sk_bar in enumerate([0.3, 0]):\n t, y = solver(100, g_sk_bar=g_sk_bar)\n plt.plot(t, y[:, 0], c='k', linestyle=line_styles[ix])\n alter_figure(plot_settings) # Alter figure for publication\n\n plt.subplot(2, 1, 2)\n t1 = 1200\n t, y = solver(t1, t_start=50, duration=t1, i_bias_on=0.33, g_sk_bar=0.03)\n plt.plot(t, y[:, 0], 'k-')\n\n plot_settings['y_limits'] = [-100, 30]\n plot_settings['x_limits'] = [0, t1]\n plot_settings['y_ticks'] = [-80, -60, -40, -20, 0, 20]\n plot_settings['locator_size'] = 10\n plot_settings['scale_size'] = 100\n plot_settings['legend'] = None\n alter_figure(plot_settings, close=True) # Alter plot for publication",
"def setplot(plotdata):\n#-------------------------- \n\n\n plotdata.clearfigures() # clear any old figures,axes,items data\n\n # Figure for q[0]\n plotfigure = plotdata.new_plotfigure(name='Pressure', figno=1)\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes()\n plotaxes.axescmd = 'subplot(211)'\n \n #plotaxes.xlimits = [0.,150.]\n plotaxes.ylimits = [-1.,1.0]\n plotaxes.title = 'Pressure'\n\n # Set up for item on these axes:\n plotitem = plotaxes.new_plotitem(plot_type='1d_plot')\n plotitem.plot_var = 0\n plotitem.plotstyle = '-o'\n plotitem.color = 'b'\n plotitem.show = True # show on plot?\n plotitem.kwargs = {'linewidth':2,'markersize':5}\n \n\n\n # Set up for axes in this figure:\n plotaxes = plotfigure.new_plotaxes()\n plotaxes.axescmd = 'subplot(212)'\n plotaxes.xlimits = 'auto'\n plotaxes.ylimits = [-1.,1.]\n plotaxes.title = 'Velocity'\n\n # Set up for item on these axes:\n plotitem = plotaxes.new_plotitem(plot_type='1d_plot')\n plotitem.plot_var = 1\n plotitem.plotstyle = '-'\n plotitem.color = 'b'\n plotitem.show = True # show on plot?\n plotitem.kwargs = {'linewidth':3,'markersize':5}\n \n\n # Parameters used only when creating html and/or latex hardcopy\n # e.g., via visclaw.frametools.printframes:\n\n plotdata.printfigs = True # print figures\n plotdata.print_format = 'png' # file format\n plotdata.print_framenos = 'all' # list of frames to print\n plotdata.print_fignos = 'all' # list of figures to print\n plotdata.html = True # create html files of plots?\n plotdata.html_homelink = '../README.html'\n plotdata.latex = True # create latex file of plots?\n plotdata.latex_figsperline = 2 # layout of plots\n plotdata.latex_framesperline = 1 # layout of plots\n plotdata.latex_makepdf = False # also run pdflatex?\n\n return plotdata",
"def show():\n setup()\n plt.show()",
"def show_plot(figure_id=None):\n if figure_id is None:\n fig = pl.gcf()\n else:\n # do this even if figure_id == 0\n fig = pl.figure(num=figure_id)\n pl.show()\n pl.pause(1e-9)\n fig.canvas.manager.window.activateWindow()\n fig.canvas.manager.window.raise_()",
"def show_plot(self):\r\n\t\tself.generate_plot()\r\n\t\tplt.show()",
"def plot_finalize():\n global figure\n global axes\n\n plot_refresh()\n plt.ioff()\n plt.show()\n\n figure, axes = None, None",
"def _plotData(self, identity, src, plotFile, outputFile, overlay):\n # determine the max y-values for best overlay placement\n maxima = []\n acceleration = plt.figure()\n\n if self.multiplot:\n plots = len(self.graph[0])\n else:\n # create only one plot\n ax = acceleration.add_subplot(111)\n plots = 1\n\n if identity == 'accel':\n source = self.graph[0]\n identityLong = 'Acceleration'\n elif identity == 'gyro':\n source = self.graph[1]\n identityLong = 'Gyroscope'\n\n for i, accel in enumerate(source):\n j = i + 1 if self.multiplot else 1\n ax = acceleration.add_subplot(plots,1,j)\n ax.set_title('{} of {}'.format(identityLong, plotFile))\n ax.plot(\n src['x']\n , src[accel]\n , color=self.colors[i]\n , label=accel)\n if overlay:\n # place overlay arrows at 1/3 of y-max\n # yOverlay = 0.3 * max(maxima)\n yOverlay = 3 if identity == 'accel' else 200\n legend = []\n for gs in overlay:\n ax.annotate(''\n , xy=(gs['start'],yOverlay)\n , xycoords='data'\n , xytext=(gs['end'],yOverlay)\n , textcoords='data'\n , horizontalalignment='bottom'\n , verticalalignment='center'\n , arrowprops=dict(\n arrowstyle=\"<|-|>\"\n , fc=\"white\"\n , ec=\"black\"\n , alpha=0.5\n )\n ).set_alpha(0.5)\n legend.append(gs['id']+\": \"+gs['start']+\"-\"+gs['end'])\n\n at = AnchoredText(\"Gold Standard:\\n\"+\"\\n\".join(legend),\n prop=dict(size=10), loc=4)\n at.patch.set_boxstyle(\"round\")\n at.patch.set_alpha(0.5)\n ax.add_artist(at)\n\n ax.legend(\n loc=1\n , fontsize=10\n , fancybox=True).get_frame().set_alpha(0.5)\n # plt.show()\n plt.savefig(outputFile, format='svg')",
"def plot(self):\n self.fig = plt.figure('black hole')\n self.fig.clf() #clear the graph to avoir superposing data from the same set (can be deactivated if need to superpose)\n self.ax = plt.subplot()\n\n if self.img2 is not None:\n self.ax.imshow(self.img2)\n else:\n print(\"No black hole deformation in the memory, displayed the original image instead.\")\n self.ax.imshow(self.img_debut)\n\n self.fig.canvas.set_window_title('Black hole')\n self.ax.set_title(\"scrool to zoom in or out \\nright click to add an offset in the background \\nleft click to refresh image \\n close the option windows to stop the program\")\n self.fig.canvas.mpl_connect('scroll_event', self.onscroll)\n self.fig.canvas.mpl_connect('button_press_event', self.onclick)\n self.fig.canvas.mpl_connect('axes_leave_event', self.disconnect)\n self.fig.canvas.mpl_connect('axes_enter_event', self.connect)\n\n self.draw()",
"def plot():\n pass",
"def update_plot():\n pass",
"def create_figure(self) -> None:\n plt.ion()\n self.fig = plt.figure(1)\n self.axis = self.fig.add_subplot(111, xlim=(0, 1), ylim=(0, 1))\n self.axis.grid(True)\n plt.xticks(np.linspace(0, 1, self._param[\"n_v\"] + 1))\n plt.yticks(np.linspace(0, 1, self._param[\"n_v\"] + 1))\n a_plt, = self.axis.plot([], [], 'bx', markersize=5)\n l_plt, = self.axis.plot([], [], 'r.', markersize=15)\n self.plots = [a_plt, l_plt]",
"def _init_plot(self) -> None:\n\n # create a grayscale plot\n out = sys.stdout\n sys.stdout = open(\"/dev/null\", \"w\")\n hdu = self.image_generator.image(self.ra, self.dec)\n self.plot = aplpy.FITSFigure(hdu)\n self.plot.show_grayscale()\n self.plot.set_theme(\"publication\")\n sys.stdout = out\n\n # label for the position angle\n pa_string = \"PA = %.1f\" % self.mode_details.position_angle().to_value(u.deg)\n if self.mode_details.automated_position_angle():\n pa_string += \" (auto)\"\n self.draw_label(0.95, -0.05, pa_string, style=\"italic\", weight=\"bold\")\n\n # label for the title\n if self.title:\n self.draw_label(\n 0.5, 1.03, self.title, style=\"italic\", weight=\"bold\", size=\"large\"\n )\n\n # label for the image source\n self.draw_label(\n -0.05,\n -0.05,\n \"%s\" % self.image_generator.source(),\n style=\"italic\",\n weight=\"bold\",\n )\n\n # grid overlay\n self.plot.add_grid()\n self.plot.grid.set_alpha(0.2)\n self.plot.grid.set_color(\"b\")\n\n # indicate the RSS field of view\n self.draw_circle(self.ra, self.dec, 4.0 * u.arcmin, \"g\")\n self.draw_label(\n 0.79,\n 0.79,\n \"RSS\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"left\",\n color=(0, 0, 1),\n )\n\n # indicate the Salticam field of view\n self.draw_circle(self.ra, self.dec, 5.0 * u.arcmin, \"g\")\n self.draw_label(\n 0.86,\n 0.86,\n \"SCAM\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"left\",\n color=(0, 0, 1),\n )\n\n # labels for north and east direction\n self.draw_label(\n self.ra,\n self.dec + 4.8 * u.arcmin,\n \"N\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n color=(0, 0.5, 1),\n )\n self.draw_label(\n self.ra + 4.8 * u.arcmin / np.abs(np.cos(self.dec)),\n self.dec,\n \"E\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"right\",\n color=(0, 0.5, 1),\n )\n\n # add cross hairs\n self.draw_centered_line(\n 0 * u.deg,\n 8 * u.arcmin,\n self.ra,\n self.dec,\n color=\"g\",\n linewidth=0.5,\n alpha=1.0,\n )\n self.draw_centered_line(\n 90 * u.deg,\n 8 * u.arcmin,\n self.ra,\n self.dec,\n color=\"g\",\n linewidth=0.5,\n alpha=1.0,\n )\n\n # label for the magnitude range and bandpass\n if self.magnitude_range:\n self._show_magnitudes()\n\n # add mode specific content\n if not self.basic_annotations:\n self.mode_details.annotate_finder_chart(self)",
"def __plot_pres__(self, refresh=False, *args):\n # If plot is not requested, return:\n if not self.plotPressureVar.get():\n return\n\n # Check for a closed window:\n if 'pressure' in self.plots.keys() and not matplotlib.pyplot.fignum_exists(self.plots['pressure'].number):\n del self.plots['pressure']\n refresh = False\n # Update the existing plot, if it exists\n refresh = refresh or 'pressure' in self.plots.keys()\n if refresh:\n if 'pressure' in self.plots.keys():\n fig = self.plots['pressure']\n fig = matplotlib.pyplot.figure(fig.number)\n fig.clear()\n else:\n return\n # Make a new window:\n else:\n fig = matplotlib.pyplot.figure(figsize=(4,3))\n fig.canvas.set_window_title('pressure, time = ' + '{:.3f}'.format(1e9*self.imp.t(self.it)))\n ax = fig.add_subplot(111)\n\n # Plot:\n ax.plot(1e4*self.imp.r((self.it), self.ir)[0], self.imp.P((self.it), self.ir)[0], 'k-')\n\n ax.set_xlabel('r (um)', fontsize=12)\n ax.set_ylabel('Pressure (GBar)', fontsize=12)\n\n if self.logxVar.get():\n ax.set_xscale('log')\n if self.logyVar.get():\n ax.set_yscale('log')\n\n matplotlib.pyplot.tight_layout()\n\n if not refresh:\n fig.show()\n fig.canvas.draw()\n if self.wm is not None:\n self.wm.addWindow(matplotlib.pyplot.get_current_fig_manager().window)\n self.plots['pressure'] = fig",
"def _setup_plot(x: float, y: float) -> plt.figure:\n LOG.debug(\"Initializing plot.\")\n plt.ion()\n fig = plt.figure(figsize=(x, y), num=\"GlacierFlowModel\")\n fig.patch.set_facecolor(\"black\")\n return fig",
"def combine_plot(qa_out_path,brain_path):\n \n #Get the scan volume of the brain.\n brain_ref = nib.load(brain_path)\n brain_ref_shape = brain_ref.shape[0:3]\n \n plots_list = ['Rotate_Z_axis_000000.png','Rotate_Z_axis_000001.png','Rotate_Z_axis_000002.png',\n 'Rotate_Y_axis_000000.png','Rotate_Y_axis_000001.png','Rotate_Y_axis_000002.png',\n 'Rotate_X_axis_000000.png','Rotate_X_axis_000001.png','Rotate_X_axis_000002.png']\n y_labels = [\"Rotate with Z axis\",\"Rotate with Y axis\",\"Rotate with X axis\"]\n x_labels = [\"angle=0\",\"angle=120\",\"angle=240\"]\n \n #Temporary list to store the image nparray:\n im_arr=[] \n \n fig= plt.figure()\n plt.title(f'QA_tractography. Scan volume = {brain_ref_shape} \\n\\n', fontsize=60,fontweight='bold')\n plt.xticks([])\n plt.yticks([])\n plt.axis(\"off\")\n\n j = 0\n for i in range(9):\n #Load in the nine images into a nparray one by one.\n im_arr = np.array(Image.open(qa_out_path + \"/\" + plots_list[i]))\n #Change the background of the image into black:\n im_arr = np.where(im_arr<=0.01, 255, im_arr) \n ax = fig.add_subplot(3,3,i+1)\n ax.imshow(im_arr,interpolation=\"none\",alpha=0.9)\n \n #Set the X labels and Y labels\n if i<3:\n ax.set_title(x_labels[i],fontsize=60,fontweight='bold')\n if i % 3 == 0:\n ax.set_ylabel(y_labels[j],fontsize=60,fontweight='bold')\n j = j + 1\n plt.xticks([])\n plt.yticks([])\n \n fig.set_size_inches(40, 40, forward = True)\n fig.savefig(qa_out_path + \"/\" + 'qa_tractography.png', format='png')\n\n #Delete the Nine images which used to generate the qa_tractography.png \n for plot in plots_list:\n if os.path.exists(qa_out_path + \"/\" + plot):\n os.remove(qa_out_path + \"/\" + plot)\n else:\n print('No such file generated from streamlines window. Please check if the streamline.trk files is generated from the pipeline correctly or not')",
"def plots():\n out = interactive_output(generate_plots, {'gsize':gridSlider, 'ra':RABox, 'ra':RASlider, 'dec':DECBox, 'dec':DECSlider, 'ang':radBox, 'ang':radSlider, 'style':hexDrop})\n return display(widgrid, out)",
"def update_plot(self,ax):\n self.replot(ax)",
"def show_plots():\n plt.show()",
"def plot(self, *args, **kwargs):\r\n if ('kind' in kwargs and \\\r\n kwargs['kind'] == 'map') or \\\r\n (len(args) > 3 and args[3] == 'map'):\r\n from arcgis.features._data.geodataset.viz import plot\r\n has_wm = True\r\n wm = kwargs.pop('map_widget', None)\r\n if wm is None:\r\n has_wm = False\r\n wm = GIS().map()\r\n if has_wm:\r\n plot(df=self,\r\n map_widget=wm,\r\n name=kwargs.pop('name', \"Feature Collection Layer\"),\r\n renderer_type=kwargs.pop(\"renderer_type\", None),\r\n symbol_type=kwargs.pop('symbol_type', None),\r\n symbol_style=kwargs.pop('symbol_style', None),\r\n col=kwargs.pop('col', None),\r\n colors=kwargs.pop('cmap', None) or kwargs.pop('colors', None) or kwargs.pop('pallette', 'jet'),\r\n alpha=kwargs.pop('alpha', 1),\r\n **kwargs)\r\n return True\r\n else:\r\n return plot(df=self,\r\n map_widget=wm,\r\n name=kwargs.pop('name', \"Feature Collection Layer\"),\r\n renderer_type=kwargs.pop(\"renderer_type\", None),\r\n symbol_type=kwargs.pop('symbol_type', None),\r\n symbol_style=kwargs.pop('symbol_style', None),\r\n col=kwargs.pop('col', None),\r\n colors=kwargs.pop('cmap', None) or kwargs.pop('colors', None) or kwargs.pop('pallette', 'jet'),\r\n alpha=kwargs.pop('alpha', 1),\r\n **kwargs)\r\n if ('kind' in kwargs and \\\r\n kwargs['kind'] == 'map') or \\\r\n (len(args) > 3 and args[3] == 'map') and \\\r\n ('as_graphic' in kwargs and kwargs['as_graphic']):\r\n from arcgis.features import FeatureCollection, FeatureSet\r\n from arcgis import geometry\r\n if self._gis is None:\r\n gis = GIS(set_active=False)\r\n else:\r\n gis = self._gis\r\n if self.sr:\r\n sr = self.sr\r\n else:\r\n sr = self.sr\r\n extent = None\r\n if HASARCPY:\r\n if sr:\r\n wkid = None\r\n if hasattr(sr, 'factoryCode'):\r\n wkid = {'wkid' : sr.factoryCode}\r\n elif isinstance(sr, geometry.SpatialReference):\r\n wkid = self.sr\r\n ext = self.geoextent\r\n extent = {\r\n \"xmin\" : ext[0],\r\n \"ymin\" : ext[1],\r\n \"xmax\" : ext[2],\r\n \"ymax\" : ext[3],\r\n \"spatialReference\" : wkid\r\n }\r\n else:\r\n ext = self.geoextent\r\n extent = {\r\n \"xmin\" : ext[0],\r\n \"ymin\" : ext[1],\r\n \"xmax\" : ext[2],\r\n \"ymax\" : ext[3],\r\n \"spatialReference\" : {'wkid' : 4326}\r\n }\r\n else:\r\n sr = self.sr\r\n if self.sr is None:\r\n sr = {'wkid' : 4326}\r\n\r\n ext = self.geoextent\r\n extent = {\r\n \"xmin\" : ext[0],\r\n \"ymin\" : ext[1],\r\n \"xmax\" : ext[2],\r\n \"ymax\" : ext[3],\r\n \"spatialReference\" : sr\r\n }\r\n if 'map_widget' not in kwargs:\r\n raise Exception(\"map_widget is required to plot the SpatialDataFrame\")\r\n else:\r\n m = kwargs.pop('map_widget')\r\n symbol = kwargs.pop('symbol', None)\r\n popup = kwargs.pop('popup', None)\r\n try:\r\n fs = FeatureSet.from_dict(self.__feature_set__)\r\n m.draw(fs, symbol=symbol, popup=popup)\r\n if extent and \\\r\n isinstance(extent, dict):\r\n m.extent = extent\r\n except:\r\n raise Exception('Could not plot the Spatial DataFrame.')\r\n else:\r\n return super(SpatialDataFrame, self).plot(*args, **kwargs)",
"def plot(self, show=True, **plot_iso_args):\n plot_dict = dict(\n plot_type='isotherm',\n adsorbent_basis=self.adsorbent_basis,\n adsorbent_unit=self.adsorbent_unit,\n loading_basis=self.loading_basis,\n loading_unit=self.loading_unit,\n pressure_unit=self.pressure_unit,\n pressure_mode=self.pressure_mode,\n fig_title=self.material,\n )\n plot_dict.update(plot_iso_args)\n\n axes = plot_iso(self, **plot_dict)\n\n if show:\n plt.show()\n return None\n\n return axes",
"def show():\n plt.show()",
"def show():\n plt.show()",
"def show():\n plt.show()",
"def figure1():\n\n plot_settings = {'y_limits': [-80, -50],\n 'x_limits': None,\n 'y_ticks': [-80, -70, -60, -50],\n 'locator_size': 5,\n 'y_label': 'Voltage (mV)',\n 'x_ticks': [],\n 'scale_size': 20,\n 'x_label': \"\",\n 'scale_loc': 4,\n 'figure_name': 'figure_1',\n 'legend_size': 8,\n 'legend': None,\n 'y_on': True}\n\n t, y = solver(100) # Integrate solution\n plt.figure(figsize=(5, 2)) # Create figure\n plt.plot(t, y[:, 0], 'k-') # Plot solution\n\n \"\"\"\n Annotate plot with figures\n \"\"\"\n plt.gca().annotate('fAHP', xy=(13.5, -65), xytext=(17, -60),\n arrowprops=dict(facecolor='black', shrink=0, headlength=10, headwidth=5, width=1), )\n plt.gca().annotate('ADP', xy=(15.5, -66), xytext=(25, -65),\n arrowprops=dict(facecolor='black', shrink=0, headlength=10, headwidth=5, width=1), )\n plt.gca().annotate('mAHP', xy=(38, -77), xytext=(43, -72),\n arrowprops=dict(facecolor='black', shrink=0, headlength=10, headwidth=5, width=1), )\n alter_figure(plot_settings, close=True) # Alter figure for publication",
"def plot_skyplot(\n self,\n figure_name: str=\"plot_skyplot_{system}.{FIGURE_FORMAT}\",\n ) -> List[pathlib.PosixPath]:\n figure_paths = list()\n \n # Convert azimuth to range 0-360 degree\n azimuth = self.dset.site_pos.azimuth\n idx = azimuth < 0\n azimuth[idx] = 2 * np.pi + azimuth[idx]\n \n # Convert zenith distance from radian to degree\n zenith_distance = np.rad2deg(self.dset.site_pos.zenith_distance)\n \n # Generate x- and y-axis data per system\n for sys in sorted(self.dset.unique(\"system\")):\n x_arrays = []\n y_arrays = []\n labels = []\n \n figure_path = self.figure_dir / figure_name.replace(\"{system}\", sys).replace(\"{FIGURE_FORMAT}\", FIGURE_FORMAT)\n figure_paths.append(figure_path)\n \n for sat in sorted(self.dset.unique(\"satellite\")):\n if not sat.startswith(sys):\n continue\n idx = self.dset.filter(satellite= sat)\n x_arrays.append(azimuth[idx])\n y_arrays.append(zenith_distance[idx])\n labels.append(sat)\n \n # Plot with polar projection\n # TODO: y-axis labels are overwritten after second array plot. Why? What to do?\n plot(\n x_arrays=x_arrays,\n y_arrays=y_arrays,\n xlabel=\"\",\n ylabel=\"\",\n y_unit=\"\",\n labels=labels,\n figure_path=figure_path,\n opt_args={\n \"colormap\": \"hsv\",\n \"figsize\": (7, 7.5),\n \"legend\": True,\n \"legend_ncol\": 6,\n \"legend_location\": \"bottom\",\n \"plot_to\": \"file\",\n \"plot_type\": \"scatter\",\n \"projection\": \"polar\",\n \"title\": f\"Skyplot for {enums.gnss_id_to_name[sys]}\\n Azimuth [deg] / Elevation[deg]\",\n \"xlim\": [0, 2 * np.pi],\n \"ylim\": [0, 90],\n \"yticks\": (range(0, 90, 30)), # sets 3 concentric circles\n \"yticklabels\": (map(str, range(90, 0, -30))), # reverse labels from zenith distance to elevation\n },\n )\n \n return figure_paths",
"def do_plot_abs(the_input):\n pressurefield = None\n\n if the_input is None:\n raise Exception(\"You must supply a pressurefield or world:cueBeamCore2.CueBeamWorld\")\n\n if (type(the_input) is CueBeamWorld):\n world = the_input\n pressurefield = world.rxPlane.pressurefield\n\n if (type(the_input) is numpy.ndarray):\n world = CueBeamWorld() # create new, default world\n pressurefield = the_input\n\n if pressurefield is None:\n raise Exception(\"Something wrong: pressurefield is still None\")\n\n hfig = plt.figure(num=1, figsize=(8, 6), dpi=90, facecolor='white', edgecolor='black')\n\n imgplot = plt.imshow(\n X=numpy.real(pressurefield),\n extent=(\n world.rxPlane.z0, world.rxPlane.z0 + world.rxPlane.nz * world.rxPlane.dz,\n world.rxPlane.y0, world.rxPlane.y0 + world.rxPlane.ny * world.rxPlane.dy\n ),\n # interpolation=\"spline36\",\n interpolation=\"nearest\",\n clim=(0, 8.0),\n origin=\"lower\")\n # end imshow\n plt.set_cmap(\"plasma\") # black-to-yellow color map\n plt.xlabel(\"z-axis[m]\")\n plt.ylabel(\"y-axis[m]\")\n plt.show()",
"def add_orbit_plot(self, plane='XY', target=None, timelim=False, loc=111, \n ls='g.', title=False, invertX=True):\n import matplotlib.pyplot as plt\n \n if not plane.upper() in ('XY','XZ','YZ'):\n raise ValueError(\"{0} is not a valid plot plane.\".format(plane))\n\n fig, ax = set_target(target, loc=loc, figsize=(5,5))\n \n # Variables to map plot plane to correct variables:\n plane = plane.upper()\n ijk = {'X':0, 'Y':1, 'Z':2}\n i = ijk[plane[0]]\n j = ijk[plane[1]]\n\n if not timelim: \n # Set default time limit if none given.\n timelim = [self.time[0], self.time[-1]]\n iMin=0\n iMax=-1\n else:\n # Use timelim to get indices that bound our plots.\n timediff = abs(self.time - timelim[-1])\n iMax = np.nonzero(timediff == timediff.min())[0][0]\n timediff = abs(self.time - timelim[0])\n iMin = np.nonzero(timediff == timediff.min())[0][0]\n \n # Add orbit:\n ax.plot(self['SM_xyz'][iMin:iMax,i], self['SM_xyz'][iMin:iMax,j],ls)\n # Add body:\n add_body(ax,add_night=(plane!='YZ'))\n\n # Axis details:\n ax.axis('equal')\n if plane.upper() in ('XY','XZ') and invertX:\n xmin, xmax = ax.get_xlim()\n if xmin < xmax:\n ax.invert_xaxis()\n ax.set_xlabel('SM %s'%(plane[0]))\n ax.set_ylabel('SM %s'%(plane[1]))\n if title:\n ax.set_title(title)\n grid_zeros(ax)\n set_orb_ticks(ax)\n\n return fig, ax",
"def subplot_to_figure(self):\n if self.format is \"show\":\n plt.show()\n elif self.format is \"png\":\n plt.savefig(self.path + self.filename + \".png\", bbox_inches=\"tight\")",
"def plot(self):\n pass",
"def _close_figure(self):\n if self.disp_images:\n plt.show()\n else:\n plt.close()",
"def plot_pz(pz_inst):\n plt.figure()\n \n plt.plot(pz_inst.z_data, pz_inst.p_data)\n \n plt.savefig(\"pz_figure.png\")\n plt.close()",
"def generate_plot(self):\r\n\t\tx, y = zip(*[p.p for p in self.universe])\r\n\t\tself.ax.cla()\r\n\t\tself.ax.plot(x, y, '.')\r\n\t\tself.ax.set_title('Universe at time: %d' % self.universe.time)\r\n\t\tself.ax.set_xlim([P_MU-4*P_STD, P_MU+4*P_STD])\r\n\t\tself.ax.set_ylim([P_MU-4*P_STD, P_MU+4*P_STD])",
"def show_figure(self):\n pylab.show()",
"def plot(self, A = None):\r\n if A is not None: self.A = A\r\n self.im.set_data(self.A)\r\n self.ax.draw_artist(self.im)\r\n self.fig.canvas.blit(self.ax.bbox)",
"def show():\n\tplt.show()",
"def create_plot():\n\n fig, ax = plt.subplots()\n return fig, ax",
"def goplot(self, sender):\n cube = self.cube_picker.value\n if cube:\n IPython.display.clear_output()\n fig = plt.figure()\n x_name = self.x_coord.value\n y_name = self.y_coord.value\n if (cube.coord(axis='X').name() == x_name and\n cube.coord(axis='Y').name() == y_name):\n projection = cube.coord_system().as_cartopy_projection()\n ax = fig.add_subplot(111, projection=projection)\n ax.coastlines()\n else:\n ax = fig.add_subplot(111)\n conf = self.plot_type.value(cube, ax, coords=[x_name, y_name])\n self.browser = cube_browser.Browser(conf)\n self.browser.on_change(None)\n self.plot_container.children = [self.browser.form]",
"def showPlot2():\n raise NotImplementedError",
"def showPlot1(): \n raise NotImplementedError",
"def iplot(figure_or_data, **plot_options):\n from plotly.basedatatypes import BaseFigure, BaseLayoutType\n\n if \"auto_open\" not in plot_options:\n plot_options[\"auto_open\"] = False\n url = plot(figure_or_data, **plot_options)\n\n if isinstance(figure_or_data, dict):\n layout = figure_or_data.get(\"layout\", {})\n if isinstance(layout, BaseLayoutType):\n layout = layout.to_plotly_json()\n elif isinstance(figure_or_data, BaseFigure):\n layout = figure_or_data.layout.to_plotly_json()\n else:\n layout = {}\n\n embed_options = dict()\n embed_options[\"width\"] = layout.get(\"width\", \"100%\")\n embed_options[\"height\"] = layout.get(\"height\", 525)\n try:\n float(embed_options[\"width\"])\n except (ValueError, TypeError):\n pass\n else:\n embed_options[\"width\"] = str(embed_options[\"width\"]) + \"px\"\n\n try:\n float(embed_options[\"height\"])\n except (ValueError, TypeError):\n pass\n else:\n embed_options[\"height\"] = str(embed_options[\"height\"]) + \"px\"\n\n return tools.embed(url, **embed_options)",
"def finalize_plot(self, artifact_name, attacker_x=None, attacker_y=None):\n # Plot the axis ticks.\n plt.ylim((self.min_y - 10.0, self.max_y + 10.0))\n plt.xlim((self.min_x - 10.0, self.max_x + 10.0))\n plt.xticks([self.min_x + 1000, 0.0, self.max_x], size=15)\n plt.yticks([self.min_y + 1000, 0.0, self.max_y], size=15)\n # Add and place the labels.\n ax = plt.gca()\n plt.ylabel(\"Crossrange (ft)\", size=15)\n plt.xlabel(\"Downrange (ft)\", size=15)\n plt.subplots_adjust(bottom=0.25, left=0.25)\n ax.yaxis.set_label_coords(-0.1, 0.5)\n # Place the plane.\n plane = plt.imread(\"plane.png\").transpose((1, 0, 2))\n width = (self.max_x - self.min_x) / 10\n height = (496.0 / 499.0) * width\n x_start = -(width / 2.0)\n y_start = -(height / 2.0)\n plt.imshow(plane, extent=[x_start, x_start + width,\n y_start, y_start + height], zorder=100)\n plane = np.flip(plane, 1)\n if attacker_x is None:\n attacker_x = self.max_x - (2 * width)\n if attacker_y is None:\n attacker_y = self.max_y - (2 * height)\n red_plane = self.color_plane_png(plane, [1.0, 0, 0], True)\n plt.imshow(red_plane, zorder=100,\n extent=[attacker_x, attacker_x + width,\n attacker_y, attacker_y + height])\n self.record_artifact(plt, artifact_name, \"matplotlib\")\n plt.clf()",
"def apolco(a,minfeh=-3,out=None) :\n apo=np.where((a['TELESCOPE'] == 'apo25m') & (a['RV_FEH']>minfeh) )[0]\n fig=vscat(a[apo],marker='o',density=True)\n lco=np.where((a['TELESCOPE'] == 'lco25m') & (a['RV_FEH']>minfeh) )[0]\n vscat(a[lco],fig=fig,ls=':',marker='+',density=True)\n if out is not None : \n fig[0].savefig(out+'_1.png')\n plt.close()\n i1,i2=match.match(a['APOGEE_ID'][apo],a['APOGEE_ID'][lco])\n print('matched {:d} stars'.format(len(i1)))\n fig,ax=plots.multi(1,2)\n #plots.plotp(ax[0,0],a['SNR'][apo[i1]],a['VHELIO_AVG'][apo[i1]]-a['VHELIO_AVG'][lco[i2]],yr=[-3,3],yt=r'$\\Delta$ VHELIO_AVG',xt='S/N')\n #plots.plotp(ax[0,1],a['SNR'][apo[i1]],a['VHELIO_AVG'][apo[i1]]-a['VHELIO_AVG'][lco[i2]],yr=[-50,50],yt=r'$\\Delta$ VHELIO_AVG',xt='S/N')\n #plots.plotp(ax[1,0],a['SNR'][apo[i1]],a['VSCATTER'][apo[i1]]-a['VSCATTER'][lco[i2]],yr=[-0.5,0.5],yt=r'$\\Delta$ VSCATTER',xt='S/N')\n #plots.plotp(ax[1,1],a['SNR'][apo[i1]],a['VSCATTER'][apo[i1]]-a['VSCATTER'][lco[i2]],yr=[-5,5],yt=r'$\\Delta$ VSCATTER',xt='S/N')\n ax[0].hist(a['VHELIO_AVG'][apo[i1]]-a['VHELIO_AVG'][lco[i2]],bins=np.arange(-0.5,0.5,0.02),histtype='step')\n ax[0].set_xlabel(r'$\\Delta$ VHELIO_AVG')\n ax[1].hist(a['VSCATTER'][apo[i1]]-a['VSCATTER'][lco[i2]],bins=np.arange(-0.25,0.25,0.01),histtype='step')\n ax[1].set_xlabel(r'$\\Delta$ VSCATTER')\n if out is not None : \n fig.savefig(out+'_2.png')\n plt.close()",
"def plot_refresh():\n figure.canvas.draw()",
"def plot_fourier_ampl(fourier_ica_obj, meg_data, W_orig,\n fnout=None, show=True):\n\n\n # ------------------------------------------\n # import necessary modules\n # ------------------------------------------\n from matplotlib import pyplot as plt\n from matplotlib import gridspec as grd\n\n\n # ------------------------------------------\n # generate sources for plotting\n # ------------------------------------------\n fourier_ampl = fourier_ica_obj.get_fourier_ampl(meg_data, W_orig)\n\n\n # ------------------------------------------\n # collect some general information\n # ------------------------------------------\n ncomp = fourier_ampl.shape[0]\n nbins = fourier_ampl.shape[1]\n sfreq_bins = nbins/(fourier_ica_obj.fhigh - fourier_ica_obj.flow)\n\n # define axis/positions for plots\n xaxis_fourier = np.arange(nbins)/sfreq_bins + fourier_ica_obj.flow\n\n\n # ------------------------------------------\n # loop over all activations\n # ------------------------------------------\n plt.ioff()\n plt.figure('Fourier amplitude', figsize=(5, 14))\n nplot = np.min([10, ncomp])\n\n gs = grd.GridSpec(nplot, 1)\n for icomp in range(nplot):\n\n if icomp == nplot-1:\n spines = ['bottom']\n else:\n spines = []\n\n # ----------------------------------------------\n # plot Fourier amplitudes\n # ----------------------------------------------\n p1 = plt.subplot(gs[icomp, 0])\n plt.xlim(fourier_ica_obj.flow, fourier_ica_obj.fhigh)\n plt.ylim(0.0, 1.0)\n adjust_spines(p1, spines, labelsize=13)\n if icomp == nplot-1:\n plt.xlabel('freq [Hz]')\n elif icomp == 0:\n p1.set_title(\"Fourier amplitude (arbitrary units)\")\n\n p1.bar(xaxis_fourier, fourier_ampl[icomp, :], 0.8, color='b', )\n\n # add some information\n IC_number = 'IC#%d' % (icomp+1)\n p1.text(fourier_ica_obj.flow-5, 0.4, IC_number, color='black', rotation=90)\n\n # save image\n if fnout:\n plt.savefig(fnout + '.png', format='png')\n\n # show image if requested\n if show:\n plt.show()\n\n plt.close('Fourier amplitude')\n plt.ion()",
"def show_plot() :\n logger.info(\"Show plot\")\n pylab.axis('equal')\n pylab.xlabel(\"Longitud\")\n pylab.ylabel(\"Latitud\")\n pylab.grid(True)\n pylab.title(\"Product tiles and product source\")\n pylab.show()",
"def main():\n font = {'family' : 'normal',\n 'weight' : 'normal',\n 'size' : 18}\n\n matplotlib.rc('font', **font)\n\n ###Plot overviews\n\n# plot_overview(cube='../combined_maps/12co_pix_2.cm.fits', plotname=\"12co_combined_peak_full.png\",\n# show_shells=False,title=r\"Combined $^{12}$CO Peak T$_{MB}$\",\n# dist=orion_dist, vmin=None, vmax=None, scalebar_color='white',\n# scalebar_pc=1.,recenter=False, ra=83.99191, dec=-5.6611303, radius=0.117325)\n \n plot_overview(plotname=\"../paper/figs/12co_nroonly_peak_full_shells.png\", show_shells=True,\n dist=orion_dist, vmin=None, vmax=None, scalebar_color='black', scale_factor = 1.,\n title=r\"\", shells_highlight=best_shells, circle_style='dotted', circle_linewidth=1.5,\n scalebar_pc=1. #,recenter=False, ra=83.99191, dec=-5.6611303, radius=0.117325\n )\n\n # plot_overview(cube='../combined_maps/12co_pix_2.cm.fits', plotname=\"12co_combined_mom0_cometary.png\",\n # show_shells=False, title=r\"Combined Integrated $^{12}$CO\",\n # dist=orion_dist, scalebar_color='white', pmax=93., mode='mom0',\n # scale_factor=1./1000,\n # scalebar_pc=0.2,recenter=True, ra=83.99191, dec=-5.6611303, radius=0.117325)\n\n # plot_overview(plotname=\"12co_nroonly_mom0_cometary.png\", show_shells=False,\n # dist=orion_dist, scalebar_color='white', pmax=93., mode='mom0',\n # scale_factor=1./1000, title=r\"NRO Integrated $^{12}$CO\",\n # scalebar_pc=0.2,recenter=True, ra=83.99191, dec=-5.6611303, radius=0.117325)\n\n # plot_overview(cube='../combined_maps/12co_pix_2.cm.fits', plotname=\"12co_combined_peak_full_shells.png\",\n # show_shells=True, shells_highlight=shells_score3, title=r\"Combined $^{12}$CO Peak T$_{MB}$\",\n # dist=orion_dist, vmin=None, vmax=None, scalebar_color='white', circle_style='dotted',\n # scalebar_pc=1.,recenter=False, ra=83.99191, dec=-5.6611303, radius=0.117325)\n\n # return\n\n mips_l1641_file = '../catalogs/MIPS_L1641a_24um.fits'\n mips_onc_file = '../catalogs/MIPS_ONC_24um.fits'\n\n irac1_l1641_file = '../catalogs/IRAC_L1641_ch1_merged_clean.fits'\n irac1_onc_file = '../catalogs/IRAC_ONC_ch1_merged_clean.fits'\n\n irac2_l1641_file = '../catalogs/IRAC_L1641_ch2_merged_clean.fits'\n irac2_onc_file = '../catalogs/IRAC_ONC_ch2_merged_clean.fits'\n\n irac4_l1641_file = '../catalogs/IRAC_L1641_ch4_merged_clean_northup.fits'\n irac4_onc_file = '../catalogs/IRAC_ONC_ch4_merged_clean_northup.fits'\n\n planck_herschel_file = '../catalogs/planck_herschel.fits'\n\n region_file = '../shell_candidates/AllShells.reg'\n vrange_file = '../shell_candidates/AllShells_vrange.txt'\n shell_list = get_shells(region_file=region_file, velocity_file=vrange_file)\n\n obaf_file = 'stars_obaf.txt'\n yso_file = \"../catalogs/spitzer_orion.fit\"\n\n obaf = ascii.read(obaf_file)\n obaf_ra, obaf_dec, obaf_label = np.array(obaf['RA']), np.array(obaf['DEC']), np.array([sp.strip(\"b'\") for sp in obaf['SP_TYPE']])\n yso = fits.open(yso_file)[1].data\n yso_ra, yso_dec, yso_label = yso['RAJ2000'], yso['DEJ2000'], yso['Cl']\n\n # for nshell in range(19,43):\n # shell = shell_list[nshell-1]\n # ra, dec, radius = shell.ra.value, shell.dec.value, shell.radius.value\n\n # #Check whether shell is in each mips image coverage.\n # l1641_xy = WCS(mips_l1641_hdu).all_world2pix(ra, dec, 0)\n \n # if (l1641_xy[0] >= 0) & (l1641_xy[0] <= mips_l1641_hdu.shape[1]) & \\\n # (l1641_xy[1] >= 0) & (l1641_xy[1] <= mips_l1641_hdu.shape[0]):\n # hdu = mips_l1641_hdu\n # else:\n # hdu = mips_onc_hdu\n\n # plot_stamp(map=hdu, ra=ra, dec=dec, radius=radius, circle_color='red',\n # pad_factor=1.5, 
contour_map=None, contour_levels=5., source_ra=None, source_dec=None, source_lists=None, \n # source_colors='cyan', plotname='{}shell{}_stamp.png'.format('MIPS',nshell), return_fig=False,\n # stretch='linear', plot_simbad_sources=False, dist=orion_dist, cbar_label=r'counts',\n # auto_scale=True, auto_scale_mode='min/max', auto_scale_pad_factor=1., vmin=0, vmax=3000)\n\n\n #cube_file = '../nro_maps/13CO_20161011_FOREST-BEARS_xyb_spheroidal_dV0.11kms_YS.fits'\n cube_file = '../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits'\n ir_l1641_hdu = fits.open(irac4_l1641_file)[0]\n ir_onc_hdu = fits.open(irac4_onc_file)[0]\n\n spec_cube = SpectralCube.read(cube_file)\n ra_grid = spec_cube.spatial_coordinate_map[1].to(u.deg).value\n dec_grid = spec_cube.spatial_coordinate_map[0].to(u.deg).value\n vel_grid = spec_cube.spectral_axis\n pad_factor = 1.5\n\n #plot_overview(show_shells=True)\n #plot_overview(plotname=\"12co_nro_peak.png\", show_shells=False)\n #plot_overview(cube=\"/Volumes/Untitled/13co_pix_2.cm.fits\", plotname=\"13co_combined_peak.png\", show_shells=False)\n #return\n #channel_vmax = [12.9, 14]\n for nshell in range(17,18):\n shell = shell_list[nshell-1]\n ra, dec, radius = shell.ra.value, shell.dec.value, shell.radius.value\n\n l1641_xy = WCS(ir_l1641_hdu).wcs_world2pix(ra, dec, 0)\n #print(l1641_xy)\n if (l1641_xy[0] >= 0) & (l1641_xy[0] <= ir_l1641_hdu.shape[1]) & \\\n (l1641_xy[1] >= 0) & (l1641_xy[1] <= ir_l1641_hdu.shape[0]):\n ir_hdu = ir_l1641_hdu\n else:\n ir_hdu = ir_onc_hdu\n\n #Extract sub_cube around shell.\n subcube_mask = (abs(ra_grid - ra) < radius * pad_factor) &\\\n (abs(dec_grid - dec) < radius * pad_factor)\n sub_cube = spec_cube.with_mask(subcube_mask).minimal_subcube().spectral_slab(shell.vmin, shell.vmax)\n\n #Integrate between vmin and vmax.\n mom0_hdu = sub_cube.moment0().hdu\n\n # mask_inshell = (abs(ra_grid - ra) < radius) &\\\n # (abs(dec_grid - dec) < radius)\n # subcube_inshell = spec_cube.with_mask(mask_inshell).minimal_subcube().spectral_slab(shell.vmin, shell.vmax)\n # mom0_hdu_inshell = subcube_inshell.moment0().hdu\n\n #Calculate contour levels.\n empty_channel = spec_cube.closest_spectral_channel(500*u.Unit('m/s'))\n sigma = np.nanstd(spec_cube[empty_channel][subcube_mask]).value\n #print(\"sigma: {}\".format(sigma))\n delta_vel = (sub_cube.spectral_extrema[1] - sub_cube.spectral_extrema[0]).value\n #print(\"delta_vel: {}\".format(delta_vel))\n mom0_sigma = sigma * delta_vel\n #print(mom0_sigma) \n #contour_levels = np.linspace(5.*mom0_sigma, np.nanmax(mom0_hdu_inshell.data), 12)\n contour_levels = np.linspace(28.*mom0_sigma, 45.*mom0_sigma, 6 )\n\n #Get source coordinates.\n\n\n # plot_stamp(map=ir_hdu, ra=ra, dec=dec, radius=radius, circle_color='red',\n # pad_factor=pad_factor, contour_map=mom0_hdu, contour_levels=contour_levels, contour_color='white',\n # plotname='{}shell{}_{}{}to{}_stamp.png'.format('8µm', nshell, \"12CO\", shell.vmin.value, shell.vmax.value),\n # return_fig=False,\n # stretch='linear', plot_simbad_sources=False, dist=orion_dist,\n # auto_scale=True, auto_scale_mode='median', auto_scale_pad_factor=0.8, auto_scale_nsigma=4.,\n # cbar_label=\"Counts\", cmap='inferno',\n # source_ra=[obaf_ra, yso_ra], source_dec=[obaf_dec, yso_dec],\n # source_colors=['white', 'red'], source_markers=['*', 'None'], source_sizes=[300,50],\n # source_labels=[obaf_label, yso_label], dpi=300\n # )\n\n #cube_file = \"../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits\"\n \n \n # 
plot_channels(cube=cube_file, ra=ra, dec=dec, radius=radius,\n # source_lists=None, stretch='linear', pad_factor=1.5, vel_min=shell.vmin.value, vel_max=shell.vmax.value,\n # plotname='12co_channels_shell'+str(nshell)+'.png', chan_step=2, plot_simbad_sources=False,\n # vmin=None, vmax=None, max_chans=12,\n # #cbar_label=\"Counts\",\n # source_ra=[obaf_ra, yso_ra], source_dec=[obaf_dec, yso_dec],\n # source_colors=['white', 'red'], source_markers=['*', '+'], source_sizes=[200,15], dpi=300)\n\n # angle = 90*u.deg\n # pv = plot_pv(cube=cube_file, ra_center=shell.ra, dec_center=shell.dec,\n # vel=[shell.vmin - 1*u.km/u.s, shell.vmax + 1*u.km/u.s], length=shell.radius*4.,\n # width=7.5*u.arcsec, angle=angle,\n # pad_factor=1., plotname='12co_pv_shell'+str(nshell)+'_angle'+str(angle.value)+'.png',\n # stretch='linear', auto_scale=True, dpi=900.)\n \n\n\n #simbad_brightstars(output='stars_obaf.txt', output_format='ascii', replace_ra='deg')\n\n\n #movie(test=False, labels=False) \n\n #cube_file = '../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits'\n # region_file = '../nro_maps/SouthShells.reg'\n # N = 2 # Number of shell candidates to plot\n # shell_list = get_shells(region_file=region_file)\n\n # cube_file = \"../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits\"\n # #cube_file = \"../nro_maps/13CO_20161011_FOREST-BEARS_xyb_spheroidal_dV0.11kms_YS.fits\"\n\n # for n in range(1,2):\n # shell = shell_list[n]\n # for deg in np.linspace(0, 180, 13):\n # angle = deg*u.deg\n # pv = plot_pv(cube=cube_file, ra_center=shell.ra, dec_center=shell.dec,\n # vel=[4*u.km/u.s, 8*u.km/u.s], length=shell.radius*2.*4.,\n # width=7.5*u.arcsec, angle=105*u.deg,\n # pad_factor=1., plotname='12co_pv_shell'+str(n+1)+'_angle'+str(angle.value)+'morev.png', return_subplot=True,\n # stretch='linear', auto_scale=True)\n\n # cube_file = \"../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits\"\n # for n in range(N):\n # shell = shell_list[n]\n # plot_channels(cube=cube_file, ra=shell.ra.value, dec=shell.dec.value, radius=shell.radius.value,\n # source_lists=None, stretch='linear', pad_factor=1.5, vel_min=shell.vmin.value, vel_max=shell.vmax.value,\n # plotname='12co_channels_shell'+str(n+1)+'.png', chan_step=2, plot_simbad_sources=True, simbad_color='blue')\n\n # cube_file = \"../nro_maps/13CO_20161011_FOREST-BEARS_xyb_spheroidal_dV0.11kms_YS.fits\"\n # for n in range(N):\n # shell = shell_list[n]\n # plot_channels(cube=cube_file, ra=shell.ra.value, dec=shell.dec.value, radius=shell.radius.value,\n # source_lists=None, stretch='linear', pad_factor=1.5, vel_min=shell.vmin.value, vel_max=shell.vmax.value,\n # plotname='13co_channels_shell'+str(n+1)+'.png', chan_step=2, plot_simbad_sources=True, simbad_color='blue')",
"def __init__(self):\n self.fig = pl.figure(1,figsize=(8,6), dpi=80 , frameon = True , facecolor = '0.75' , edgecolor = 'w')\n self.fig.add_subplot(111 , axisbg = 'w' , projection = 'rectilinear') #if you want to add axes on particular place: fig.add_axes([0.15, 0.1, 0.7, 0.3]) where -> [begin , bottom to start axes , width , height ]\n self.separated = True #if we have a list and need to plot the plots separated",
"async def plot(self, new=False) -> None:\n self._logger.debug(\"running\")\n self.figure.clear()\n self.figure.set_tight_layout(True)\n num_plots = len(self._plots)\n axes = None\n for i in range(num_plots):\n plot = self._plots[i]\n name = plot[0]\n active = plot[2]\n if active:\n if i == 0:\n axes = self.figure.add_subplot(1, 1, 1)\n axes.tick_params(axis='x', labelrotation=30)\n axes.set_ylabel(name, color='#1f77b4')\n await sleep(.001)\n if not new:\n await create_task(self.plot_device_data(axes, name))\n else:\n alt_axes = axes.twinx()\n alt_axes.set_ylabel(name, color='#ff7f0e')\n alt_axes.tick_params(axis='y', labelcolor='#ff7f0e')\n alt_axes.set_yticks(np.arange(0, 6, step=1))\n await sleep(.001)\n if not new:\n await create_task(self.plot_device_data(alt_axes, name))\n\n if not new:\n self.add_vert_lines()\n await sleep(.001)\n self.figure.canvas.draw()\n self._logger.debug(\"done\")",
"def liveplot(x, y, xlim, ylim, title):\n plt.plot(x,y,'b.')\n plt.xlim(xlim)\n plt.ylim(ylim)\n plt.xlabel('North-South Axis')\n plt.ylabel('East-West Axis')\n plt.title(title)\n plt.show()",
"def plot_trace(self):\n az.plot_trace(self.ifd_)",
"def show_plot(times, zones, combined, labels):\r\n\r\n #Inform user of current action\r\n print(\"Loading plots...\")\r\n\r\n #If less than 25 aggregated data points, draw bar plots\r\n if len(times) < 25:\r\n plot_drawer = draw_bar_plot\r\n #Else, draw line plots\r\n else:\r\n plot_drawer = draw_line_plot\r\n\r\n #If zone energy usage should be shown combined, draw combined plot\r\n if combined:\r\n draw_combined(times, zones, plot_drawer, labels)\r\n #Else, draw plots for each zone\r\n else:\r\n draw_zones(times, zones, plot_drawer, labels)\r\n\r\n\r\n #Print instructions for how to continue\r\n print(\"Close plots window to continue...\", end=\"\\n\\n\")\r\n\r\n\r\n #Show finished plot\r\n #NOTE: Blocks thread until GUI is closed\r\n plt.show()",
"def plot(self,displayplt = True,saveplt = False,savepath='',polarplt=True, dbdown = False):\n plt.figure()\n\n #legacy beamprofile data is a 1-D array of the peak negative pressure\n pnp = self.pnp\n\n if dbdown:\n pnp = 20.0*np.log10(pnp/np.max(pnp))\n else:\n pnp = pnp*1e-6\n\n if polarplt:\n figure1 = plt.polar(self.angle * np.pi / 180.0, pnp)\n else:\n figure1 = plt.plot(self.angle, pnp)\n #the latest beamprofile data should be a 2-D array of the hydrophone output\n plt.xlabel('Angle (degrees)')\n if dbdown:\n plt.ylabel('Peak Negative Pressure (dB Max)')\n else:\n plt.ylabel('Peak Negative Pressure (MPa)')\n plt.title(self.txdr)\n if displayplt:\n plt.show()\n if saveplt:\n if savepath=='':\n #prompt for a save path using a default filename\n defaultfn = self.txdr+'_'+self.collectiondate+'_'+self.collectiontime+'_beamprofile.png'\n savepath = tkinter.filedialog.asksaveasfilename(initialfile=defaultfn, defaultextension='.png')\n plt.savefig(savepath)\n return figure1, savepath",
"def figure8():\n\n plot_settings = {'y_limits': [15, 60],\n 'x_limits': None,\n 'y_ticks': [20, 30, 40, 50, 60],\n 'locator_size': 5,\n 'y_label': 'ISI (ms)',\n 'x_ticks': [],\n 'scale_size': 0,\n 'x_label': \"\",\n 'scale_loc': 4,\n 'figure_name': 'figure_8',\n 'legend': ['First ISI', 'Second ISI'],\n 'legend_size': 8,\n 'y_on': True,\n 'legend_location': 4}\n\n g_t_bars = np.linspace(0.02, 0.2, 10)\n isi = np.zeros((len(g_t_bars), 2))\n\n for ix, g_t_bar in enumerate(g_t_bars):\n t, y = solver(200, t_start=15, duration=260, g_t_bar=g_t_bar)\n t_spike, f = spike_times(t, y[:, 0])\n isi[ix, 0] = t_spike[1] - t_spike[0]\n isi[ix, 1] = t_spike[2] - t_spike[1]\n\n plt.subplot(2, 2, 1) # Generate subplot 1 (top left)\n plt.plot(g_t_bars, isi[:, 0], c='k', marker='o', fillstyle='none', linestyle='-')\n plt.plot(g_t_bars, isi[:, 1], c='k', marker='s', fillstyle='none', linestyle='dotted')\n\n \"\"\"\n Annotate plot\n \"\"\"\n plt.gca().arrow(g_t_bars[3], 35, 0, 11, head_width=0, head_length=0, fc='k', ec='k')\n plt.gca().arrow(g_t_bars[3], 46, -0.01, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().arrow(g_t_bars[3], 35, 0.01, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().annotate(\"Acceleration\", (0.1, 35), fontsize=8)\n plt.gca().annotate(\"Adaptation\", (0.01, 46), fontsize=8)\n alter_figure(plot_settings)\n\n plt.subplot(2, 2, 2) # Generate subplot 2 (top right)\n g_n_bars = np.linspace(0.02, 0.2, 10)\n isi = np.zeros((len(g_t_bars), 2))\n for ix, g_n_bar in enumerate(g_n_bars):\n t, y = solver(200, g_n_bar=g_n_bar, duration=260, t_start=15, g_t_bar=0.02)\n t_spike, f = spike_times(t, y[:, 0])\n\n isi[ix, 0] = t_spike[1] - t_spike[0]\n isi[ix, 1] = t_spike[2] - t_spike[1]\n plt.plot(g_t_bars, isi[:, 0], c='k', marker='o', fillstyle='none', linestyle='-')\n plt.plot(g_t_bars, isi[:, 1], c='k', marker='s', fillstyle='none', linestyle='dotted')\n\n \"\"\"\n Annotate plot\n \"\"\"\n plt.gca().arrow(g_n_bars[3], 30, 0, 10, head_width=0, head_length=0, fc='k', ec='k')\n plt.gca().arrow(g_n_bars[3], 40, -0.01, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().arrow(g_n_bars[3], 30, 0.01, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().annotate(\"Acceleration\", (0.1, 30), fontsize=8)\n plt.gca().annotate(\"Adaptation\", (0.015, 40), fontsize=8)\n plot_settings['y_ticks'] = []\n plot_settings['y_label'] = \"\"\n plot_settings['y_on'] = False\n plot_settings['legend_location'] = 4\n alter_figure(plot_settings)\n\n plt.subplot(2, 2, 3) # Generate subplot 3 (bottom left)\n g_t_bars = np.linspace(0.02, 0.16, 8)\n isi = np.zeros((len(g_t_bars), 2))\n for ix, g_t_bar in enumerate(g_t_bars):\n t, y = solver(200, g_t_bar=g_t_bar, duration=260, t_start=15, ca_type=1)\n t_spike, f = spike_times(t, y[:, 0])\n\n isi[ix, 0] = t_spike[1] - t_spike[0]\n isi[ix, 1] = t_spike[2] - t_spike[1]\n plt.plot(g_t_bars, isi[:, 0], c='k', marker='o', fillstyle='none', linestyle='-')\n plt.plot(g_t_bars, isi[:, 1], c='k', marker='s', fillstyle='none', linestyle='dotted')\n\n \"\"\"\n Annotate plot\n \"\"\"\n plt.gca().arrow(g_t_bars[2], 25, -0.02, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().arrow(g_t_bars[4], 25, 0.02, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().annotate(\"Adaptation\", (0.06, 25), fontsize=8)\n\n plot_settings['y_limits'] = [0, 45]\n plot_settings['y_ticks'] = [0, 10, 20, 30, 40]\n plot_settings['locator_size'] = 5\n plot_settings['y_label'] = 'ISI (ms)'\n plot_settings['y_on'] = True\n 
plot_settings['legend_location'] = 3\n alter_figure(plot_settings)\n\n plt.subplot(2, 2, 4)\n g_n_bars = np.linspace(0.02, 0.16, 8)\n isi = np.zeros((len(g_t_bars), 2))\n for ix, g_n_bar in enumerate(g_n_bars):\n t, y = solver(200, duration=260, t_start=15, g_n_bar=g_n_bar, g_t_bar=0.02, ca_type=2)\n t_spike, f = spike_times(t, y[:, 0])\n\n isi[ix, 0] = t_spike[1] - t_spike[0]\n isi[ix, 1] = t_spike[2] - t_spike[1]\n plt.plot(g_t_bars, isi[:, 0], c='k', marker='o', fillstyle='none', linestyle='-')\n plt.plot(g_t_bars, isi[:, 1], c='k', marker='s', fillstyle='none', linestyle='dotted')\n\n \"\"\"\n Annotate plot\n \"\"\"\n plt.gca().arrow(g_n_bars[2], 20, -0.02, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().arrow(g_n_bars[4], 20, 0.02, 0, head_width=2, head_length=0.01, fc='k', ec='k')\n plt.gca().annotate(\"Adaptation\", (0.06, 20), fontsize=8)\n\n plot_settings['y_ticks'] = []\n plot_settings['y_label'] = ''\n plot_settings['y_on'] = False\n plot_settings['legend_location'] = 2\n alter_figure(plot_settings, close=True)",
"def plot(self):\n\t\tplot_chain(self.database_path, self.temp_folder)\n\t\tplot_density(self.database_path, self.temp_folder, self.cal_params)",
"def plotFigures(self, path):\n self.width = 500\n self.height = 400\n\n if self.pathIds[path] == 0:\n self.lineFigurePath = path\n self.lineFigure.load(path)\n self.lineFigure = self.checkFigureSize(self.lineFigure)\n self.lineFigureScene.addPixmap(QtGui.QPixmap.fromImage( self.lineFigure))\n x, y = self.getWidgetPos(self.displayLineFigure)\n w, h = self.getWidgetDims(self.lineFigure)\n self.displayLineFigure.setGeometry(QtCore.QRect(x,y,w,h))\n self.displayLineFigure.fitInView(self.displayLineFigure.sceneRect() , self.ratioOption)\n elif self.pathIds[path] == 1:\n self.barFigurePath = path\n self.barFigure.load(path)\n self.barFigureScene.addPixmap(QtGui.QPixmap.fromImage(self.barFigure))\n self.barFigure = self.checkFigureSize(self.barFigure)\n\n x, y = self.getWidgetPos(self.displayBarFigure)\n w, h = self.getWidgetDims(self.barFigure)\n self.displayBarFigure.setGeometry(QtCore.QRect(x,y,w,h))\n self.displayBarFigure.fitInView(self.barFigureScene.sceneRect(), self.ratioOption)\n else:\n if self.barFigurePath is None:\n self.barFigurePath = path\n self.barFigure.load(path)\n self.barFigure = self.checkFigureSize(self.barFigure)\n self.barFigureScene.addPixmap(QtGui.QPixmap.fromImage( self.barFigure))\n x, y = self.getWidgetPos(self.displayBarFigure)\n w, h = self.getWidgetDims(self.barFigure)\n self.displayBarFigure.setGeometry(QtCore.QRect(x,y,w,h))\n self.displayBarFigure.fitInView(self.barFigureScene.sceneRect() , self.ratioOption)\n elif self.lineFigurePath is None:\n self.lineFigurePath = path\n self.lineFigure.load(path)\n self.lineFigure = self.checkFigureSize(self.lineFigure)\n self.lineFigureScene.addPixmap(QtGui.QPixmap.fromImage( self.lineFigure))\n x, y = self.getWidgetPos(self.displayLineFigure)\n w, h = self.getWidgetDims(self.lineFigure)\n self.displayLineFigure.setGeometry(QtCore.QRect(x,y,w,h))\n self.displayLineFigure.fitInView(self.lineFigureScene.sceneRect (), self.ratioOption)",
"def setup_figure(self):\n \n # connect ui widgets to measurement/hardware settings or functions\n self.ui.start_pushButton.clicked.connect(self.start)\n self.ui.interrupt_pushButton.clicked.connect(self.interrupt)\n self.settings.save_h5.connect_to_widget(self.ui.save_h5_checkBox)\n self.settings.save_movie.connect_to_widget(self.ui.save_movie_checkBox)\n \n # Set up pyqtgraph graph_layout in the UI\n self.graph_layout=pg.GraphicsLayoutWidget()\n self.ui.plot_groupBox.layout().addWidget(self.graph_layout)\n \n self.aux_graph_layout=pg.GraphicsLayoutWidget()\n self.ui.aux_plot_groupBox.layout().addWidget(self.aux_graph_layout)\n \n self.camera_layout=pg.GraphicsLayoutWidget()\n self.ui.camera_groupBox.layout().addWidget(self.camera_layout)\n\n # Create PlotItem object (a set of axes) \n \n self.plot1 = self.graph_layout.addPlot(row=1,col=1,title=\"Lick\")\n self.plot2 = self.graph_layout.addPlot(row=2,col=1,title=\"breathing\")\n\n # Create PlotDataItem object ( a scatter plot on the axes )\n self.breathing_plot = self.plot2.plot([0])\n self.lick_plot_0 = self.plot1.plot([0])\n self.lick_plot_1 = self.plot1.plot([1]) \n \n self.lick_plot_0.setPen('y')\n self.lick_plot_1.setPen('g')\n \n self.T=np.linspace(0,10,10000)\n self.k=0\n \n self.camera_view=pg.ViewBox()\n self.camera_layout.addItem(self.camera_view)\n self.camera_image=pg.ImageItem()\n self.camera_view.addItem(self.camera_image)",
"def embed_matplotlib(self):",
"def init_fig():\r\n # Set the axis and plot titles\r\n orbit, = ax.plot([], [], [])\r\n satellite, = ax.plot([], [], [], 'o', color='red')\r\n earth, = ax.plot([], [], [], 'o', color='green')\r\n time_text.set_text('')\r\n ax.set_title(Title_3D, fontsize=22)\r\n ax.set_xlim3d([-lim, lim])\r\n ax.set_xlabel('I\\n[km]')\r\n ax.set_ylim3d([-lim, lim])\r\n ax.set_ylabel('J\\n[km]')\r\n ax.set_zlim3d([-lim, lim])\r\n ax.set_zlabel('K\\n[km]')\r\n # plot Earth\r\n\r\n u = np.linspace(0, 2 * np.pi, 100)\r\n v = np.linspace(0, np.pi, 100)\r\n x = R_moon * np.outer(np.cos(u), np.sin(v))\r\n y = R_moon * np.outer(np.sin(u), np.sin(v))\r\n z = R_moon * np.outer(np.ones(np.size(u)), np.cos(v))\r\n ax.plot_wireframe(x, y, z, color=\"grey\", label=\"Moon\", linewidth=0.3, rstride=7, cstride=7)\r\n # Must return the list of artists, but we use a pass\r\n # through so that they aren't created multiple times\r\n return orbit, satellite, earth, time_text",
"def show(self, view = None, save = False, savename = None, cmap = None):\n\n # define the style\n if cmap == None:\n style = PlotStyle(cmap_name = 'macplus')\n else:\n style = PlotStyle(cmap_name = cmap)\n \n # default is skymap\n if view == None:\n view = self._view_options[0]\n else:\n if view not in self._view_options:\n print ('ERROR:', 'view option', view, 'is not defined')\n return\n\n # sky map\n if view == self._view_options[0]:\n\n # figure\n fig = plt.figure(figsize = (12, 6))\n ax = plt.gca()\n \n # skymap\n skymap = AllSkyMap(projection = 'hammer', lon_0 = 0, lat_0 = 0)\n\n \n # define RA and DEC over all coordinates\n rightascensions = np.linspace(-np.pi, np.pi, self.num_points)\n declinations = self.declination\n \n cmap = style.cmap\n norm_proj = matplotlib.colors.Normalize(self.exposure_factor.min(),\n self.exposure_factor.max())\n\n # plot the exposure map\n # NB: use scatter as plot and pcolormesh have bugs in shiftdata methods\n for dec, proj in np.nditer([declinations, self.exposure_factor]):\n decs = np.tile(dec, self.num_points)\n c = SkyCoord(ra = rightascensions * u.rad, \n dec = decs * u.rad, frame = 'icrs')\n lon = c.galactic.l.deg\n lat = c.galactic.b.deg\n skymap.scatter(lon, lat, latlon = True, linewidth = 3, \n color = cmap(norm_proj(proj)), alpha = 0.7)\n\n # plot exposure boundary\n self.draw_exposure_lim(skymap)\n \n # add labels\n skymap.draw_standard_labels(style.cmap, style.textcolor)\n\n # add colorbar\n self._exposure_colorbar(style)\n\n # decplot\n elif view == self._view_options[1]:\n\n # plot for all decs\n \n plt.figure()\n plt.plot(self.declination, self.exposure_factor, linewidth = 5, alpha = 0.7)\n plt.xlabel('$\\delta$');\n plt.ylabel('m($\\delta$)');\n\n\n if save:\n plt.savefig(savename, dpi = 1000,\n bbox_inches = 'tight', pad_inches = 0.5)",
"def construct_plot(self, amprtb):\n self.fig, [[self.ax1, self.ax2], [self.ax3, self.ax4]] = \\\n plt.subplots(2, 2, figsize=(10, 10),\n subplot_kw={'projection': self.projection})\n ind1, ind2 = amprtb._get_scan_indices(\n self.scanrange, self.timerange, False)\n\n # 10 GHz plot\n stuff = amprtb.plot_ampr_track(\n var='10'+self.chan, latrange=self.latrange,\n lonrange=self.lonrange, parallels=self.parallels,\n meridians=self.meridians, title='', wmts_layer=self.wmts_layer,\n clevs=self.clevs, cmap=self.cmap, show_track=self.show_track,\n maneuver=self.maneuver, scanrange=self.scanrange,\n show_grid=self.show_grid, equator=self.equator,\n show_qc=self.show_qc, resolution=self.resolution,\n projection=self.projection, ax=self.ax1, fig=self.fig,\n verbose=self.verbose, timerange=self.timerange, return_flag=True)\n self.ax1.set_title(self.make_title('10', amprtb, ind1, ind2))\n\n # 19 GHz plot\n amprtb.plot_ampr_track(\n var='19'+self.chan, latrange=self.latrange,\n lonrange=self.lonrange, parallels=self.parallels,\n meridians=self.meridians, title='', wmts_layer=self.wmts_layer,\n clevs=self.clevs, cmap=self.cmap, show_track=self.show_track,\n maneuver=self.maneuver, scanrange=self.scanrange,\n show_grid=self.show_grid, equator=self.equator,\n show_qc=self.show_qc, resolution=self.resolution,\n projection=self.projection, ax=self.ax2, fig=self.fig,\n verbose=self.verbose, timerange=self.timerange)\n self.ax2.set_title(self.make_title('19', amprtb, ind1, ind2))\n\n # 37 GHz plot\n amprtb.plot_ampr_track(\n var='37'+self.chan, latrange=self.latrange,\n lonrange=self.lonrange, parallels=self.parallels,\n meridians=self.meridians, title='', wmts_layer=self.wmts_layer,\n clevs=self.clevs, cmap=self.cmap, show_track=self.show_track,\n maneuver=self.maneuver, scanrange=self.scanrange,\n show_grid=self.show_grid, equator=self.equator,\n show_qc=self.show_qc, resolution=self.resolution,\n projection=self.projection, ax=self.ax3, fig=self.fig,\n verbose=self.verbose, timerange=self.timerange)\n self.ax3.set_title(self.make_title('37', amprtb, ind1, ind2))\n\n # 85 GHz plot\n amprtb.plot_ampr_track(\n var='85'+self.chan, latrange=self.latrange,\n lonrange=self.lonrange, parallels=self.parallels,\n meridians=self.meridians, title='', wmts_layer=self.wmts_layer,\n clevs=self.clevs, cmap=self.cmap, show_track=self.show_track,\n maneuver=self.maneuver, scanrange=self.scanrange,\n show_grid=self.show_grid, equator=self.equator,\n show_qc=self.show_qc, resolution=self.resolution,\n projection=self.projection, ax=self.ax4, fig=self.fig,\n verbose=self.verbose, timerange=self.timerange)\n self.ax4.set_title(self.make_title('85', amprtb, ind1, ind2))\n\n # plt.tight_layout()\n return True",
"def __plot(name, x, y):\n import matplotlib.pyplot as plt\n\n plt.plot(x, y)\n plt.xlabel('elements')\n plt.ylabel('time (seconds)')\n plt.savefig(\"{}\".format(name))",
"def save_local_interactive(fig: go.Figure, name):\n from plotly.offline import plot\n plot(fig, filename=name)",
"def plot_acquisition_frame(self, ax1, ax2, ax3):\n model_to_plot = self.model\n\n return self.plot2frames_acquisition(ax1, ax2, ax3,\n self.acquisition.space.get_bounds(),\n model_to_plot.model.X.shape[1],\n model_to_plot.model,\n model_to_plot.model.X,\n model_to_plot.model.Y,\n self.acquisition.acquisition_function, \n self._compute_next_evaluations())",
"def plot_overview(cube=nro_12co,\n region_file='../shell_candidates/AllShells.reg', mode='peak', plotname='12co_peak_shells.png',\n interactive=False, show_shells=True, shells_highlight=None, dist=orion_dist, vmin=None, vmax=None,\n scalebar_color=\"white\", scalebar_pc = 1., scale_factor=1., pmin=0.25,\n pmax=99.75, cbar_label=r\"Peak T$_{\\rm MB}$ [K]\",\n circle_color='white', circle_linewidth=1, circle_style=\"solid\", return_fig=False, show=True,\n title=r\"$^{12}$CO Peak T$_{MB}$\", recenter=False, ra=None, dec=None, radius=None):\n try:\n cube = SpectralCube.read(cube)\n except ValueError:\n pass\n\n if mode == \"peak\":\n image = (cube.max(axis=0) * scale_factor).hdu\n \n\n if mode == \"mom0\":\n image = (cube.moment0() * scale_factor).hdu\n\n\n\n fig = FITSFigure(image)\n if show:\n fig.show_colorscale(cmap='viridis', vmin=vmin, vmax=vmax, pmin=pmin,\n pmax=pmax, interpolation='none')\n fig.tick_labels.set_yformat(\"dd:mm\")\n fig.tick_labels.set_xformat(\"hh:mm\")\n #fig.hide_yaxis_label()\n #fig.hide_ytick_labels()\n plt.title(title)\n plt.xlabel(\"RA (J2000)\")\n plt.ylabel(\"DEC (J2000)\")\n\n if show_shells:\n shell_list = get_shells(region_file=region_file)\n for i, shell in enumerate(shell_list):\n if shells_highlight:\n if i+1 in shells_highlight:\n fig.show_circles(shell.ra.value, shell.dec.value, shell.radius.value, linestyle='solid', edgecolor=circle_color,\n facecolor='none', linewidth=3)\n else:\n fig.show_circles(shell.ra.value, shell.dec.value, shell.radius.value, linestyle=circle_style, edgecolor=circle_color,\n facecolor='none', linewidth=circle_linewidth)\n else:\n fig.show_circles(shell.ra.value, shell.dec.value, shell.radius.value, linestyle=circle_style, edgecolor=circle_color,\n facecolor='none', linewidth=circle_linewidth)\n\n #RECENTER\n if recenter:\n fig.recenter(ra, dec, radius)\n\n\n #SCALEBAR\n fig.add_scalebar(206265 * scalebar_pc / (dist.to(u.pc).value * 3600), color=scalebar_color)\n fig.scalebar.set_label(\"{} pc\".format(scalebar_pc))\n\n fig.add_colorbar()\n cb = fig.colorbar\n cb.set_axis_label_text(cbar_label)\n\n if return_fig:\n return fig\n else:\n fig.save(plotname, dpi=600)",
"def plotOverlays(self):\n if self.overlayFluxSurfaces:\n self.plotFluxSurfaces()\n if self.overlayMagneticAxis:\n self.plotMagneticAxis()\n if self.overlaySeparatrix:\n self.plotSeparatrix()\n if self.overlayWallCrossSection:\n self.plotWallCrossSection()",
"def plot(self,displayplt = True,saveplt = False,savepath='',polarplt=True, dbdown = False):\n plt.figure()\n\n #legacy beamprofile data is a 1-D array of the peak negative pressure\n if len(self.hydoutput.shape)<2:\n pnp = self.hydoutput\n else:\n sensitivity = hyd_calibration(self.cfreq)\n pnp = -1*np.min(self.hydoutput,1)/sensitivity\n\n if dbdown:\n pnp = 20.0*np.log10(pnp/np.max(pnp))\n else:\n pnp = pnp*1e-6\n\n figure1 = plt.plot(self.depth, pnp)\n #the latest beamprofile data should be a 2-D array of the hydrophone output\n plt.xlabel('Depth (mm)')\n if dbdown:\n plt.ylabel('Peak Negative Pressure (dB Max)')\n else:\n plt.ylabel('Peak Negative Pressure (MPa)')\n plt.title(self.txdr)\n if displayplt:\n plt.show()\n if saveplt:\n if savepath=='':\n #prompt for a save path using a default filename\n defaultfn = self.txdr+'_'+self.collectiondate+'_'+self.collectiontime+'_depthprofile.png'\n savepath = tkinter.filedialog.asksaveasfilename(initialfile=defaultfn, defaultextension='.png')\n plt.savefig(savepath)\n return figure1, savepath",
"def plot(self):\n\t\t\t\n\t\tfig,p1=_plt.subplots(4,sharex=True)\n\t\tp1[0].plot(self.time*1e3,self.eRogA,label='Rogowski A')\n\t\tp1[1].plot(self.time*1e3,self.eRogB,label='Rogowski B')\n\t\tp1[2].plot(self.time*1e3,self.eRogC,label='Rogowski C')\n\t\tp1[3].plot(self.time*1e3,self.eRogD,label='Rogowski D')\n\t\t_plot.finalizeSubplot(p1,xlabel='Time (ms)',ylabel='Current (A)')\n\t\t_plot.finalizeFigure(fig,title=self.title)\n\t\t\n\t\treturn p1",
"def _UpdatePlot( self ):\n self._BusyDoOp( self._UpdatePlotImpl )",
"def on_plot(self, event=None):\n data_id, theory_id, state_id = self.set_data_helper()\n self.parent.plot_data(data_id=data_id,\n state_id=state_id,\n theory_id=theory_id,\n append=False)\n self.enable_remove_plot()",
"def plot(self):\n\t\tself.plotOfHeatingCurrent().plot()",
"def plot(self):\n fx = self.fitness_functions(self.archive)\n n = len(fx[0])\n\n if n == 2:\n plt.xlabel(\"F1\")\n plt.ylabel(\"F2\")\n plt.suptitle(\"Pareto Front\")\n plt.scatter(fx[:,0], fx[:,1], label='Archive')\n plt.show()\n elif n == 3:\n plt.figure()\n ax = plt.axes(projection='3d')\n ax.scatter(fx[:, 0], fx[:, 1], fx[:, 2])\n ax.set_xlabel(\"F1\")\n ax.set_ylabel(\"F2\")\n ax.set_zlabel(\"F3\")\n plt.suptitle(\"Pareto Front of Archive\")\n plt.show()\n else:\n print(\"Cannot Print Multi-Dimensional Front greater than 3D\")",
"def plot_avg_ics_meg_space(fourier_ica_obj, meg_data, W_orig,\n A_orig, fnout=None, show=True):\n\n # ------------------------------------------\n # import necessary modules\n # ------------------------------------------\n from matplotlib import pyplot as plt\n from matplotlib import gridspec as grd\n\n\n # ------------------------------------------\n # generate sources for plotting\n # ------------------------------------------\n rec_signal_avg, orig_avg = fourier_ica_obj.get_reconstructed_signal(meg_data, W_orig, A_orig)\n\n\n # ------------------------------------------\n # collect some general information\n # ------------------------------------------\n tpost = fourier_ica_obj.tpre + fourier_ica_obj.win_length_sec\n ncomp, nchan, ntsl = rec_signal_avg.shape\n\n # define axis/positions for plots\n xaxis_time = np.arange(ntsl)/fourier_ica_obj.sfreq + fourier_ica_obj.tpre\n ylim_meg = [np.min(orig_avg), np.max(orig_avg)]\n\n\n # ------------------------------------------\n # loop over all activations\n # ------------------------------------------\n plt.ioff()\n plt.figure('averaged MEG signals', figsize=(5, 14))\n nplot = np.min([10, ncomp])\n\n gs = grd.GridSpec(nplot, 1)\n for icomp in range(nplot):\n\n if icomp == nplot-1:\n spines = ['bottom']\n else:\n spines = []\n\n # ----------------------------------------------\n # plot back-transformed signals\n # ----------------------------------------------\n p1 = plt.subplot(gs[icomp, 0])\n plt.xlim(fourier_ica_obj.tpre, tpost)\n plt.ylim(ylim_meg)\n adjust_spines(p1, spines, labelsize=13)\n if icomp == nplot-1:\n plt.xlabel('time [s]')\n elif icomp == 0:\n p1.set_title(\"reconstructed MEG-signals\")\n p1.plot(xaxis_time, orig_avg.T, 'b', linewidth=0.5)\n p1.plot(xaxis_time, rec_signal_avg[icomp, :, :].T, 'r', linewidth=0.5)\n\n # add some information\n IC_number = 'IC#%d' % (icomp+1)\n p1.text(1.1*fourier_ica_obj.tpre-0.1*tpost, 0.4*ylim_meg[1] + 0.6*ylim_meg[0],\n IC_number, color='black', rotation=90)\n\n # save image\n if fnout:\n plt.savefig(fnout + '.png', format='png')\n\n # show image if requested\n if show:\n plt.show()\n\n plt.close('averaged MEG signals')\n plt.ion()",
"def plot(self,ax=None,return_ax=False,cartopy_proj=None,prop={},map_prop={}):\n \n #Create instance of plot object\n self.plot_obj = TrackPlot()\n \n if self.basin in ['east_pacific','west_pacific','south_pacific','australia','all']:\n self.plot_obj.create_cartopy(proj='PlateCarree',central_longitude=180.0)\n else:\n self.plot_obj.create_cartopy(proj='PlateCarree',central_longitude=0.0)\n \n \n #Plot storm\n return_ax = self.plot_obj.plot_season(self,ax=ax,return_ax=return_ax,prop=prop,map_prop=map_prop)\n \n #Return axis\n if ax != None or return_ax == True: return return_ax",
"def showPlot3():\n raise NotImplementedError",
"def plot(self,tickers = None,variable = \"close\"):\n data = self.get_dataframe(tickers,variable)\n fig = data.iplot(world_readable=False,asFigure=True)\n iplot(fig)",
"def plot(frame, clipped, auto, lag, threshold, freq, save):\n fig, axes = plt.subplots(4, constrained_layout=True)\n fig.set_size_inches(8.0, 8.0)\n fig.canvas.set_window_title('Excercise 4')\n\n ax_frame, ax_clipped, ax_auto, ax_freq = axes\n\n time = np.linspace(0, frame.size / SAMPLE_RATE, num=frame.size)\n for ax in axes:\n ax.set_xlabel('time [s]')\n ax.set_ylabel('y')\n\n\n ax_frame.plot(time, frame)\n ax_clipped.plot(time, clipped)\n\n ax_auto.plot(auto)\n ax_auto.axvline(threshold, color='black', label='Threshold')\n ax_auto.stem([lag[0]], [lag[1]], linefmt='r-', basefmt=None, label='Lag')\n\n ax_freq.plot(freq[0], 'g-', label='mask-on')\n ax_freq.plot(freq[1], 'r-', label='mask-off')\n\n ax_auto.legend(loc=1)\n ax_freq.legend(loc=0)\n\n ax_frame.set_title('Maskon frame')\n ax_clipped.set_title('Central clipping with 70%')\n ax_auto.set_title('Autocorrelation')\n ax_freq.set_title('Primary frequencies of frames')\n\n ax_auto.set_xlabel('frames')\n ax_freq.set_xlabel('frames')\n\n ax_freq.set_ylabel('f0')\n\n if save:\n save_figure(fig, 'ex4')\n else:\n plt.show()",
"def plot_area(self, plot_area):\n\n self.container['plot_area'] = plot_area",
"def plot(self, ax=..., *, name=..., **kwargs):\n ...",
"def plot(self, area=False):\n for b in self.buildings:\n b.plot()",
"def builtin_plot(self, **kwargs):\n self.gp.plot(**kwargs)\n return",
"def worker_plot(fname):\n with Database() as base:\n _filter = base.get_filter(fname)\n plt.clf()\n plt.plot(_filter.trans_table[0], _filter.trans_table[1], color='k')\n plt.xlim(_filter.trans_table[0][0], _filter.trans_table[0][-1])\n plt.minorticks_on()\n plt.xlabel('Wavelength [nm]')\n plt.ylabel('Relative transmission')\n plt.title(\"{} filter\".format(fname))\n plt.tight_layout()\n plt.savefig(\"{}.pdf\".format(fname))",
"def do_plots(self, i, axis1, axis2, finder):\n fig_index = finder.plot_phase_space_matplotlib(\"%s [mm]\"%axis1, \"%s [MeV/c]\"%axis2)\n name = os.path.join(self.plot_dir,\n \"tune_\"+str(i)+\"_\"+axis1+\"_phase-space\")\n fig = matplotlib.pyplot.figure(fig_index)\n for format in [\"png\",]:\n fig.savefig(name+\".\"+format)\n matplotlib.pyplot.close(fig_index)\n\n fig_index = finder.plot_cholesky_space_matplotlib()\n name = os.path.join(self.plot_dir,\n \"tune_\"+str(i)+\"_\"+axis1+\"_cholesky-space\")\n fig = matplotlib.pyplot.figure(fig_index)\n for format in [\"png\",]:\n fig.savefig(name+\".\"+format)\n matplotlib.pyplot.close(fig_index)",
"def _plot_arm(self):\n fig, axs = plt.subplots()\n fig.show()\n axs.cla()\n axs.axis([-1, 2.5, -1, 2.5])\n axs.plot([0], [0], 'o')\n config_plots = []\n for t_step in range(0, int(self._t_sim / self._dt) + 1, 1000):\n axs.plot([0, self._x_1[t_step]], [0, self._y_1[t_step]])\n axs.plot(self._x_1[t_step], self._y_1[t_step], 'o')\n axs.plot(\n [self._x_1[t_step], self._x_2[t_step]],\n [self._y_1[t_step], self._y_2[t_step]]\n )\n axs.plot(self._x_2[t_step], self._y_2[t_step], 'o')\n axs.plot(\n [self._x_2[t_step], self._x_e[t_step]],\n [self._y_2[t_step], self._y_e[t_step]]\n )\n axs.plot(self._x_e[t_step], self._y_e[t_step], 'ro')\n axs.plot(\n self._obj_coords_plot[t_step, 0, 0],\n self._obj_coords_plot[t_step, 1, 0], 'g+')\n axs.plot(\n self._obj_coords_plot[t_step, 0, 1],\n self._obj_coords_plot[t_step, 1, 1], 'g.')\n axs.plot(\n self._obj_coords_plot[t_step, 0, 2],\n self._obj_coords_plot[t_step, 1, 2], 'g.')\n axs.plot(\n self._obj_coords_plot[t_step, 0, 3],\n self._obj_coords_plot[t_step, 1, 3], 'g.')\n axs.plot(\n self._obj_coords_plot[t_step, 0, 4],\n self._obj_coords_plot[t_step, 1, 4], 'g.')\n plt.axis('off')\n plt.pause(1 / self._plot_fps)\n fig.canvas.draw()\n image = np.frombuffer(fig.canvas.tostring_rgb(), dtype='uint8')\n config_plots.append(image.reshape(\n fig.canvas.get_width_height()[::-1] + (3, )))\n\n # Draw and create image\n return config_plots",
"def show(self):\n plt.show()",
"def figure(self):\n if self._figure is None:\n\n self._figure, ax = plt.subplots(nrows=1, dpi=self._dpi)\n if self._verbose:\n print(f\" Figure dpi set to {self._dpi}\")\n\n self._figure.set_size_inches(self._size)\n if self._verbose:\n print(\" Figure size set to \" + str(self._size) + \" inches.\")\n\n for model in self._models:\n xs, ys, _ = zip(*model._nodes)\n\n for face in model._elements:\n xf = tuple(xs[k - 1] for k in face) # 1-base index to 0-base index\n yf = tuple(ys[k - 1] for k in face)\n # plt.fill(\n # xf,\n # yf,\n # linestyle=\"dotted\",\n # edgecolor=\"magenta\",\n # alpha=0.5,\n # facecolor=\"gray\",\n # )\n plt.fill(\n xf,\n yf,\n alpha=model._alpha,\n edgecolor=model._edgecolor,\n facecolor=model._facecolor,\n linestyle=model._linestyle,\n linewidth=model._linewidth,\n )\n\n if self._xticks:\n ax.set_xticks(self._xticks)\n\n if self._yticks:\n ax.set_yticks(self._yticks)\n\n if self._xlim:\n ax.set_xlim(self._xlim)\n\n if self._ylim:\n ax.set_ylim(self._ylim)\n\n if self._xlabel:\n ax.set_xlabel(self._xlabel)\n\n if self._ylabel:\n ax.set_ylabel(self._ylabel)\n\n # set frame on or off based on the Bool \"frame\" in .json input\n ax.set_frame_on(b=self._frame)\n if len(self._tick_params) > 0:\n ax.tick_params(**self._tick_params)\n\n if self._display:\n plt.show()\n\n if self._serialize:\n self.serialize(self._folder, self._file)\n\n plt.close(\"all\")\n self._figure = None",
"def plot_fppy(self,LAXIS,xbl,xbr,ybu,ybd,ilg): \n\t\t\n # load x GRID\n grd1 = self.xzn0\n\t\n # load DATA to plot\n plt1 = self.fppy\n\t\t\t\t\n # create FIGURE\n plt.figure(figsize=(7,6))\n\t\t\n # format AXIS, make sure it is exponential\n plt.gca().yaxis.get_major_formatter().set_powerlimits((0,0))\t\t\n\n # set plot boundaries \n to_plot = [plt1]\t\t\n self.set_plt_axis(LAXIS,xbl,xbr,ybu,ybd,to_plot)\n\t\t\t\t\n # plot DATA \n plt.title(r'pressure flux y')\n plt.plot(grd1,plt1,color='brown',label = r'f$_{py}$')\n\n # define and show x/y LABELS\n setxlabel = r\"r (cm)\"\n setylabel = r\"$f_{py}$ (erg cm$^{-2}$ s$^{-1}$)\"\n plt.xlabel(setxlabel)\n plt.ylabel(setylabel)\n\t\t\n # show LEGEND\n plt.legend(loc=ilg,prop={'size':18})\n\n # display PLOT\n plt.show(block=False)\n\n # save PLOT\n plt.savefig('RESULTS/'+self.data_prefix+'mean_fppy.png')",
"def plot(self):\n\t\tself.plotOfIP().plot()",
"def figures(self):\n if np.size(self.iceicehorizons_depth1)>0:\n fig, ax = mpl.subplots()\n if self.site1.archive == 'icecore':\n mpl.xlabel(self.site1.label+' ice age (yr b1950)')\n else:\n mpl.xlabel(self.site1.label+' age (yr b1950)')\n if self.site2.archive == 'icecore':\n mpl.ylabel(self.site2.label+' ice age (yr b1950)')\n else:\n mpl.ylabel(self.site2.label+' age (yr b1950)')\n if np.size(self.iceicehorizons_depth1) > 0:\n if pccfg.show_initial:\n mpl.plot(self.site1.fct_age_init(self.iceicehorizons_depth1),\n self.site2.fct_age_init(self.iceicehorizons_depth2),\n color=pccfg.color_init, linestyle='', marker='o', markersize=2,\n label=\"Initial\")\n mpl.plot(self.site1.fct_age_model(self.iceicehorizons_depth1),\n self.site2.fct_age_model(self.iceicehorizons_depth2),\n color=pccfg.color_mod, linestyle='', marker='o', markersize=2,\n label=\"Prior\")\n mpl.errorbar(self.site1.fct_age(self.iceicehorizons_depth1),\n self.site2.fct_age(self.iceicehorizons_depth2), color=pccfg.color_opt,\n xerr=np.zeros(np.size(self.iceicehorizons_depth1)),\n linestyle='', marker='o', markersize=2,\n label=\"Posterior\")\n xstart = self.site1.fct_age(self.iceicehorizons_depth1)-self.iceicehorizons_sigma/2\n ystart = self.site2.fct_age(self.iceicehorizons_depth2)+self.iceicehorizons_sigma/2\n for i in range(np.size(self.iceicehorizons_depth1)):\n mpl.arrow(xstart[i], ystart[i], self.iceicehorizons_sigma[i],\n -self.iceicehorizons_sigma[i], color=pccfg.color_opt,\n width=0.0, head_length=0.0, head_width=0.0)\n x_low, x_up, y_low, y_up = mpl.axis()\n# x_low = self.site1.age_top\n# y_low = self.site2.age_top\n# mpl.axis((x_low, x_up, y_low, y_up))\n rangefig = np.array([min(x_low, y_low), max(x_up, y_up)])\n mpl.plot(rangefig, rangefig, color=pccfg.color_obs, label='perfect agreement', zorder=0)\n mpl.legend(loc=\"best\")\n ax.set_aspect('equal')\n if self.site1.archive == 'icecore' and self.site2.archive == 'icecore':\n printed_page = PdfPages(pccfg.datadir+self.label+'/ice_ice_synchro.pdf')\n elif self.site1.archive == 'icecore' or self.site2.archive == 'icecore':\n printed_page = PdfPages(pccfg.datadir+self.label+'/ice_synchro.pdf')\n else:\n printed_page = PdfPages(pccfg.datadir+self.label+'/synchro.pdf')\n printed_page.savefig(fig)\n printed_page.close()\n if not pccfg.show_figures:\n mpl.close()\n\n if self.site1.archive == 'icecore' and self.site2.archive == 'icecore':\n if np.size(self.airairhorizons_depth1)>0:\n fig, ax = mpl.subplots()\n mpl.xlabel(self.site1.label+' air age (yr b1950)')\n mpl.ylabel(self.site2.label+' air age (yr b1950)')\n if np.size(self.airairhorizons_depth1) > 0:\n if pccfg.show_initial:\n mpl.plot(self.site1.fct_airage_init(self.airairhorizons_depth1),\n self.site2.fct_airage_init(self.airairhorizons_depth2),\n color=pccfg.color_init,\n linestyle='',\n marker='o', markersize=2, label=\"Initial\")\n mpl.plot(self.site1.fct_airage_model(self.airairhorizons_depth1),\n self.site2.fct_airage_model(self.airairhorizons_depth2),\n color=pccfg.color_mod,\n linestyle='', marker='o', markersize=2,\n label=\"Prior\")\n mpl.errorbar(self.site1.fct_airage(self.airairhorizons_depth1),\n self.site2.fct_airage(self.airairhorizons_depth2),\n color=pccfg.color_opt,\n xerr=np.zeros_like(self.airairhorizons_sigma),\n linestyle='', marker='o', markersize=2,\n label=\"Posterior\")\n xstart = self.site1.fct_airage(self.airairhorizons_depth1)-\\\n self.airairhorizons_sigma/2\n ystart = self.site2.fct_airage(self.airairhorizons_depth2)+\\\n self.airairhorizons_sigma/2\n for i in 
range(np.size(self.airairhorizons_depth1)):\n mpl.arrow(xstart[i], ystart[i], self.airairhorizons_sigma[i],\n -self.airairhorizons_sigma[i], color=pccfg.color_opt,\n width=0.0, head_length=0.0, head_width=0.0)\n x_low, x_up, y_low, y_up = mpl.axis()\n# x_low = self.site1.age_top\n# y_low = self.site2.age_top\n# mpl.axis((x_low, x_up, y_low, y_up))\n rangefig = np.array([min(x_low, y_low), max(x_up, y_up)])\n mpl.plot(rangefig, rangefig, color=pccfg.color_obs, label='perfect agreement',\n zorder=0)\n mpl.legend(loc=\"best\")\n ax.set_aspect('equal')\n printed_page = PdfPages(pccfg.datadir+self.label+'/air_air_synchro.pdf')\n printed_page.savefig(fig)\n printed_page.close()\n if not pccfg.show_figures:\n mpl.close()\n\n if self.site2.archive == 'icecore':\n if np.size(self.iceairhorizons_depth1)>0:\n fig, ax = mpl.subplots()\n if self.site1.archive == 'icecore':\n mpl.xlabel(self.site1.label+' ice age (yr b1950)')\n else:\n mpl.xlabel(self.site1.label+' age (yr b1950)')\n mpl.ylabel(self.site2.label+' air age (yr b1950)')\n if np.size(self.iceairhorizons_depth1) > 0:\n if pccfg.show_initial:\n mpl.plot(self.site1.fct_age_init(self.iceairhorizons_depth1),\n self.site2.fct_airage_init(self.iceairhorizons_depth2),\n color=pccfg.color_init,\n linestyle='',\n marker='o', markersize=2, label=\"Initial\")\n mpl.plot(self.site1.fct_age_model(self.iceairhorizons_depth1),\n self.site2.fct_airage_model(self.iceairhorizons_depth2),\n color=pccfg.color_mod,\n linestyle='', marker='o', markersize=2,\n label=\"Prior\")\n mpl.errorbar(self.site1.fct_age(self.iceairhorizons_depth1),\n self.site2.fct_airage(self.iceairhorizons_depth2),\n color=pccfg.color_opt,\n xerr=np.zeros_like(self.iceairhorizons_sigma),\n linestyle='', marker='o', markersize=2,\n label=\"Posterior\")\n xstart = self.site1.fct_age(self.iceairhorizons_depth1)-\\\n self.iceairhorizons_sigma/2\n ystart = self.site2.fct_airage(self.iceairhorizons_depth2)+\\\n self.iceairhorizons_sigma/2\n for i in range(np.size(self.iceairhorizons_depth1)):\n mpl.arrow(xstart[i], ystart[i], self.iceairhorizons_sigma[i],\n -self.iceairhorizons_sigma[i], color=pccfg.color_opt,\n width=0.0, head_length=0.0, head_width=0.0) \n x_low, x_up, y_low, y_up = mpl.axis()\n# x_low = self.site1.age_top\n# y_low = self.site2.age_top\n# mpl.axis((x_low, x_up, y_low, y_up))\n rangefig = np.array([min(x_low, y_low), max(x_up, y_up)])\n mpl.plot(rangefig, rangefig, color=pccfg.color_obs, label='perfect agreement',\n zorder=0)\n mpl.legend(loc=\"best\")\n ax.set_aspect('equal')\n if self.site1.archive == 'icecore':\n printed_page = PdfPages(pccfg.datadir+self.label+'/ice_air_synchro.pdf')\n else:\n printed_page = PdfPages(pccfg.datadir+self.label+'/air_synchro.pdf')\n printed_page.savefig(fig)\n printed_page.close()\n if not pccfg.show_figures:\n mpl.close()\n\n if self.site1.archive == 'icecore':\n if np.size(self.airicehorizons_depth1)>0:\n fig, ax = mpl.subplots()\n mpl.xlabel(self.site1.label+' air age (yr b1950)')\n if self.site2.archive == 'icecore':\n mpl.ylabel(self.site2.label+' ice age (yr b1950)')\n else:\n mpl.ylabel(self.site2.label+' age (yr b1950)')\n if np.size(self.airicehorizons_depth1) > 0:\n if pccfg.show_initial:\n mpl.plot(self.site1.fct_airage_init(self.airicehorizons_depth1),\n self.site2.fct_age_init(self.airicehorizons_depth2),\n color=pccfg.color_init,\n linestyle='', marker='o', markersize=2, label=\"Initial\")\n mpl.plot(self.site1.fct_airage_model(self.airicehorizons_depth1),\n self.site2.fct_age_model(self.airicehorizons_depth2),\n 
color=pccfg.color_mod,\n linestyle='', marker='o', markersize=2,\n label=\"Prior\")\n mpl.errorbar(self.site1.fct_airage(self.airicehorizons_depth1),\n self.site2.fct_age(self.airicehorizons_depth2),\n color=pccfg.color_opt,\n xerr=np.zeros_like(self.airicehorizons_sigma),\n linestyle='', marker='o', markersize=2,\n label=\"Posterior\")\n xstart = self.site1.fct_airage(self.airicehorizons_depth1)-\\\n self.airicehorizons_sigma/2\n ystart = self.site2.fct_age(self.airicehorizons_depth2)+\\\n self.airicehorizons_sigma/2\n for i in range(np.size(self.airicehorizons_depth1)):\n mpl.arrow(xstart[i], ystart[i], self.airicehorizons_sigma[i],\n -self.airicehorizons_sigma[i], color=pccfg.color_opt,\n width=0.0, head_length=0.0, head_width=0.0)\n x_low, x_up, y_low, y_up = mpl.axis()\n# x_low = self.site1.age_top\n# y_low = self.site2.age_top\n# mpl.axis((x_low, x_up, y_low, y_up))\n rangefig = np.array([min(x_low, y_low), max(x_up, y_up)])\n mpl.plot(rangefig, rangefig, color=pccfg.color_obs, label='perfect agreement')\n mpl.legend(loc=\"best\")\n ax.set_aspect('equal')\n if self.site2.archive == 'icecore':\n printed_page = PdfPages(pccfg.datadir+self.label+'/air_ice_synchro.pdf')\n else:\n printed_page = PdfPages(pccfg.datadir+self.label+'/air_synchro.pdf')\n printed_page.savefig(fig)\n printed_page.close()\n if not pccfg.show_figures:\n mpl.close()",
"def main():\n save = False\n show = True\n\n #hd_parameter_plots = HDparameterPlots(save=save)\n #hd_parameter_plots.flow_parameter_distribution_for_non_lake_cells_for_current_HD_model()\n #hd_parameter_plots.flow_parameter_distribution_current_HD_model_for_current_HD_model_reprocessed_without_lakes_and_wetlands()\n #hd_parameter_plots.flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs()\n #hd_parameter_plots.flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_no_tuning()\n #ice5g_comparison_plots = Ice5GComparisonPlots(save=save)\n #ice5g_comparison_plots.plotLine()\n #ice5g_comparison_plots.plotFilled()\n #ice5g_comparison_plots.plotCombined()\n #ice5g_comparison_plots.plotCombinedIncludingOceanFloors()\n #flowmapplot = FlowMapPlots(save)\n #flowmapplot.FourFlowMapSectionsFromDeglaciation()\n #flowmapplot.Etopo1FlowMap()\n #flowmapplot.ICE5G_data_all_points_0k()\n #flowmapplot.ICE5G_data_all_points_0k_no_sink_filling()\n #flowmapplot.ICE5G_data_all_points_0k_alg4_two_color()\n #flowmapplot.ICE5G_data_all_points_21k_alg4_two_color()\n #flowmapplot.Etopo1FlowMap_two_color()\n #flowmapplot.Etopo1FlowMap_two_color_directly_upscaled_fields()\n #flowmapplot.Corrected_HD_Rdirs_FlowMap_two_color()\n #flowmapplot.ICE5G_data_ALG4_true_sinks_21k_And_ICE5G_data_ALG4_true_sinks_0k_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_sinkless_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_no_true_sinks_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_HD_as_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison()\n #flowmapplot.Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplot.Ten_Minute_Data_from_Virna_data_ALG4_corr_orog_downscaled_lsmask_no_sinks_21k_vs_0k_FlowMap_comparison()\n #flowmapplot.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n flowmapplotwithcatchment = FlowMapPlotsWithCatchments(save)\n #flowmapplotwithcatchment.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_virna_data_plus_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_present_day_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs()\n #flowmapplotwithcatchment.compare_lgm_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs()\n #flowmapplotwithcatchment.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n 
#flowmapplotwithcatchment.upscaled_rdirs_with_and_without_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.\\\n #upscaled_rdirs_with_and_without_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.\\\n #Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.\\\n #Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_glcc_olson_lsmask_0k_FlowMap_comparison()\n #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_ICE5G_plus_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_ICE6G_plus_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_ICE5G_and_ICE6G_with_catchments_tarasov_style_orog_corrs_for_both()\n #flowmapplotwithcatchment.compare_river_directions_with_dynriver_corrs_and_MERIThydro_derived_corrs()\n #flowmapplotwithcatchment.compare_river_directions_with_dynriver_corrs_and_MERIThydro_derived_corrs_original_ts()\n flowmapplotwithcatchment.compare_river_directions_with_dynriver_corrs_and_MERIThydro_derived_corrs_new_ts_10min()\n outflowplots = OutflowPlots(save)\n #outflowplots.Compare_Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_true_sinks_all_points_0k()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_sinkless_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_true_sinks_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_downscaled_ls_mask_all_points_0k_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields()\n #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_plus_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields()\n #outflowplots.Compare_Original_Corrections_vs_Upscaled_MERIT_DEM_0k()\n outflowplots.Compare_Original_Corrections_vs_Upscaled_MERIT_DEM_0k_new_truesinks()\n #outflowplots.Compare_Original_Corrections_vs_Upscaled_MERIT_DEM_0k_new_truesinks_individual_rivers()\n #outflowplots.Compare_ICE5G_with_and_without_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields()\n #hd_output_plots = HDOutputPlots()\n #hd_output_plots.check_water_balance_of_1978_for_constant_forcing_of_0_01()\n #hd_output_plots.plot_comparison_using_1990_rainfall_data()\n #hd_output_plots.plot_comparison_using_1990_rainfall_data_adding_back_to_discharge()\n #coupledrunoutputplots = CoupledRunOutputPlots(save=save)\n #coupledrunoutputplots.ice6g_rdirs_lgm_run_discharge_plot()\n #coupledrunoutputplots.extended_present_day_rdirs_lgm_run_discharge_plot()\n #coupledrunoutputplots.ocean_grid_extended_present_day_rdirs_vs_ice6g_rdirs_lgm_run_discharge_plot()\n #coupledrunoutputplots.extended_present_day_rdirs_vs_ice6g_rdirs_lgm_echam()\n 
#coupledrunoutputplots.extended_present_day_rdirs_vs_ice6g_rdirs_lgm_mpiom_pem()\n #lake_plots = LakePlots()\n #lake_plots.plotLakeDepths()\n #lake_plots.LakeAndRiverMap()\n #lake_plots.LakeAndRiverMaps()\n if show:\n plt.show()",
"def UpdatePlot(self):\n\n if self.first_time:\n for ID, plt in self.plotIDs.iteritems():\n if plt:\n tmp = FellesBaseClass.FindInstance(ID)\n self.plot_panel.oplot(\n np.array(tmp.data['time']),\n np.array(tmp.data['data']),\n draw = True,\n side ='left',\n label = tmp['label'],\n color = tmp['color'],\n xlabel = None, ylabel = None, y2label = None,\n title = None,\n dy = None,\n ylog_scale = False,\n xmin = None, xmax = None, ymin = None, ymax = None,\n refresh = True,\n show_legend= True, legend_loc='ur', legend_on= True,\n delay_draw = False,\n marker = 'None', markersize = None,\n autoscale=True,\n linewidth = 3, # default 2\n drawstyle = 'line', style = 'solid',\n grid = True,\n bgcolor= None, framecolor= None, gridcolor= None,\n labelfontsize= 10, # default 9\n legendfontsize= 12, # default 7\n fullbox=None, # 'box', 'open', 'bottom'\n axes_style=None,\n zorder=None,\n )\n self.first_time = False\n\n else:\n i = 0\n for ID,plt in self.plotIDs.iteritems():\n if plt:\n tmp = FellesBaseClass.FindInstance(ID)\n self.plot_panel.update_line(\n i,\n np.array(tmp.data['time']),\n np.array(tmp.data['data']),\n draw=True,\n )\n i += 1\n\n self.plot_panel.set_xylims(\\\n [\\\n floor( min( [ min( FellesBaseClass.FindInstance(ID).data['time'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) ),\\\n ceil( max( [ max( FellesBaseClass.FindInstance(ID).data['time'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) ),\\\n floor( min( [ min( FellesBaseClass.FindInstance(ID).data['data'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) ),\\\n ceil( max( [ max( FellesBaseClass.FindInstance(ID).data['data'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) )\\\n ]\\\n )\n\n self.panel_sizer.Fit(self)",
"def plot(data, layout, file_name):\n offline.plot({'data': data,\n 'layout': layout},\n filename='{}-{}_{}-{}.html'.format(file_name,\n todays_day,\n todays_month,\n currency))",
"def _generate_plot(ax, power_data, title, min_db, max_db):\n # only generate plots for the transducers that have data\n if power_data.size <= 0:\n return\n\n ax.set_title(title, fontsize=ZPLSCCPlot.font_size_large)\n return imshow(ax, power_data, interpolation='none', aspect='auto', cmap='jet', vmin=min_db, vmax=max_db)"
] | [
"0.6310482",
"0.626213",
"0.6240595",
"0.62188065",
"0.61812156",
"0.61757153",
"0.61031306",
"0.6088691",
"0.6061382",
"0.59504133",
"0.59478855",
"0.5947725",
"0.594132",
"0.5940302",
"0.59187317",
"0.59125113",
"0.59071434",
"0.5900893",
"0.5887625",
"0.5882247",
"0.5876005",
"0.58701366",
"0.5846342",
"0.58333284",
"0.5824177",
"0.58192474",
"0.5810883",
"0.58077234",
"0.57937276",
"0.57904536",
"0.57904536",
"0.57904536",
"0.5785961",
"0.5777488",
"0.57707906",
"0.57642376",
"0.5759246",
"0.5754296",
"0.5751786",
"0.57474124",
"0.57451737",
"0.57437533",
"0.57423615",
"0.5730207",
"0.5726758",
"0.5715293",
"0.5695832",
"0.56905574",
"0.5687403",
"0.5682086",
"0.56797075",
"0.56774735",
"0.56645066",
"0.56626666",
"0.5658722",
"0.56586754",
"0.5654124",
"0.5653829",
"0.56534845",
"0.5649654",
"0.5634734",
"0.5631022",
"0.56303954",
"0.562782",
"0.5625997",
"0.5615927",
"0.5615378",
"0.5613272",
"0.5610835",
"0.56044585",
"0.5593699",
"0.55905485",
"0.55904603",
"0.5581374",
"0.55770737",
"0.55684036",
"0.55629843",
"0.5559714",
"0.5557205",
"0.555537",
"0.55525076",
"0.55486536",
"0.5545564",
"0.5545521",
"0.55445904",
"0.5538123",
"0.5526203",
"0.552375",
"0.5521457",
"0.5517333",
"0.5516343",
"0.55116844",
"0.5511109",
"0.55081415",
"0.55048585",
"0.5501946",
"0.5501242",
"0.549715",
"0.5495546",
"0.5483775",
"0.54816353"
] | 0.0 | -1 |
Model trained with full vocabulary classification. | def __init__(self, vocab_size: int, embedding_dim: int, hidden_size: int, dropout: float = 0.2,
read_context: bool = False, pad_idx: int = Vocabulary.pad_idx):
super(FullVocabularyModel, self).__init__()
self.embedding = nn.Embedding(vocab_size, embedding_dim, padding_idx=pad_idx)
self.embed_dropout = nn.Dropout(dropout)
self.rnn = nn.LSTM(embedding_dim, hidden_size)
self.linear = nn.Linear(hidden_size, vocab_size)
self.loss_fn = nn.CrossEntropyLoss(ignore_index=pad_idx)
self.vocab_size = vocab_size
self.read_context = read_context
self.pad_idx = pad_idx
initrange = 0.5 / embedding_dim
self.embedding.weight.data.uniform_(-initrange, initrange)
self.embedding.weight.data[pad_idx].zero_() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self):\n self.sess = tf.Session()\n vocab_path = os.path.join(params.data_dir, \"vocab%d\" % params.vocab_size)\n self.vocab, self.rev_vocab = data_utils.initialize_vocabulary(vocab_path)\n self.model = model_utils.create_model(self.sess, True)\n self.model.batch_size = 1 # Respond 1 sentence at a time.",
"def retrain_model(self, new_sentences, with_punctiations):\n if with_punctiations:\n model_ = Word2Vec.load('./model/model_word2vec.bin')\n else:\n model_ = Word2Vec.load('./model/model_no_punctuation_word2vec.bin')\n\n model_.build_vocab(new_sentences, update=True)\n model_.train(new_sentences, total_examples=model_.corpus_count, epochs=model_.iter)\n\n if with_punctiations:\n model_.save('./model/model_word2vec.bin')\n else:\n model_.save('./model/model_no_punctuation_word2vec.bin')\n\n\n pass",
"def train(self):\n # >>> YOUR ANSWER HERE\n\n fake_docs = []\n fake_words = []\n fake_words_freq = {}\n real_docs = []\n real_words = []\n real_words_freq = {}\n\n # load fake data of the training dataset, store the docs and words\n fake_data = open(self.train_data['fake']).readlines()\n for sentence in fake_data:\n preprocess_sentence = sentence.strip()\n fake_docs.append(preprocess_sentence)\n fake_words.extend(preprocess_sentence.split())\n\n # load real data of the training dataset, store the docs, words and word frequencies.\n real_data = open(self.train_data['real']).readlines()\n for sentence in real_data:\n preprocess_sentence = sentence.strip()\n real_docs.append(preprocess_sentence)\n real_words.extend(preprocess_sentence.split())\n\n # remove stop words if necessary\n if self.REMOVE_STOPWORDS:\n fake_words = [word for word in fake_words if word not in self.stopwords]\n real_words = [word for word in real_words if word not in self.stopwords]\n\n # calculate all words' frequency\n for word in fake_words:\n self.vocabulary.add(word)\n fake_words_freq[word] = fake_words_freq.get(word, 0) + 1\n for word in real_words:\n self.vocabulary.add(word)\n real_words_freq[word] = real_words_freq.get(word, 0) + 1\n\n # pre-calculate the number of all docs, the number of docs per class and words frequency per class for\n # calculation in the training loop.\n n_doc = len(fake_docs) + len(real_docs)\n n_class = {'fake': len(fake_docs), 'real': len(real_docs)}\n big_doc_dict = {'fake': fake_words_freq, 'real': real_words_freq}\n fake_words_num = 0\n real_words_num = 0\n for w in self.vocabulary:\n fake_words_num += fake_words_freq.get(w, 0)\n real_words_num += real_words_freq.get(w, 0)\n words_frequency_per_class = {'fake': fake_words_num, 'real': real_words_num}\n\n # Training\n for c in self.classes:\n self.logprior[c] = math.log(n_class[c] / n_doc)\n for w in self.vocabulary:\n count_w_c = big_doc_dict[c].get(w, 0)\n log_likelihood = math.log((count_w_c + 1) / (len(self.vocabulary) + words_frequency_per_class[c]))\n self.loglikelihood[(w, c)] = log_likelihood\n # >>> END YOUR ANSWER",
"def trainModel( self, featureTrain, classTrain):",
"def model(self):\n filePath = self.config['data_path']['train_data']\n data = self.loadCSV(filePath)\n cleandata = self.preprocess(data)\n X, y = self.dataSplit(cleandata)\n X = self.CountVect(X, self.config['transform_path']['transform_model_path'])\n X_train, X_test, y_train, y_test = self.TrainTestSplit(X, y)\n self.MultinomialNB(X_train, X_test, y_train, y_test, self.config['nlp_path']['model_path'])",
"def create_train_model(self):\n st = LancasterStemmer()\n with open(self.data_path, encoding='utf8') as f_name:\n sentences = [[st.stem(w) for w, t in pos_tag(line.lower().split()) if 'N' in t] for line in f_name]\n sentences = [filter(lambda x: len(x) > 2, (word.strip(punctuation) for word in sentences)) for sent in sentences]\n model = Word2Vec(sentences,\n min_count=self.min_count,\n size=self.size,\n window=self.window,\n workers=4)\n model.save(self.model_path)",
"def all_categories(epochs=50):\n import models\n import tensorflow as tf\n from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, TensorBoard\n\n with corpus.get_conn() as conn:\n posts, label_vectors = corpus.get_training(conn)\n\n preprocessed = np.array(models.preprocess(posts))\n del posts\n print(f'preprocessed.shape = {preprocessed.shape}')\n\n labels = np.array(label_vectors)\n del label_vectors\n print(f'labels.shape = {labels.shape}')\n permutation = np.random.permutation(preprocessed.shape[0])\n preprocessed = preprocessed[permutation]\n labels = labels[permutation]\n\n val_split = 0.15\n val_count = int(np.round(preprocessed.shape[0] * val_split))\n print(f'val_count = {val_count}')\n print(f'train labels mean = {np.mean(labels[:-val_count], axis=0)}')\n print(f'val labels mean = {np.mean(labels[-val_count:], axis=0)}')\n\n class_occurances = np.count_nonzero(labels[:-val_count], axis=0)\n class_weights = class_occurances / np.sum(class_occurances)\n class_weights = dict(enumerate(class_weights))\n print(f'class_weights = {class_weights}')\n\n model = models.multi()\n\n callbacks = [\n ReduceLROnPlateau(),\n EarlyStopping(patience=4),\n ModelCheckpoint(filepath='output/All/model.h5', save_best_only=True),\n TensorBoard(log_dir=os.path.join('logs', 'fit', datetime.now().strftime('%Y%m%d-%H%M%S')))\n ]\n\n history = model.fit(preprocessed, labels, callbacks=callbacks, epochs=epochs, verbose=2, validation_split=val_split, class_weight=class_weights, batch_size=64)\n # model.save('output/All/model.h5') not necessary when ModelCheckpoint callback used\n\n val_labels = labels[-val_count:]\n print(f'val_labels.shape = {val_labels.shape}')\n val_predict = (model.predict(preprocessed[-val_count:]) > 0.5) * 1 # turn predictions into integers\n print(f'val_predict.shape = {val_predict.shape}')\n val_predict = val_predict.reshape(val_labels.shape)\n\n print('final validation results per category:')\n for category in corpus.categories:\n category_index = corpus.categories[category]\n\n cat_labels = val_labels[:,category_index]\n cat_predict = val_predict[:,category_index]\n eq = cat_labels == cat_predict\n neq = cat_labels != cat_predict\n\n tp = np.sum(eq[cat_predict == 1], axis=0)\n tn = np.sum(eq[cat_predict == 0], axis=0)\n fp = np.sum(neq[cat_predict == 1], axis=0)\n fn = np.sum(neq[cat_predict == 0], axis=0)\n accuracy = (tp + tn) / val_labels.shape[0]\n precision = tp / (tp + fp)\n recall = tp / (tp + fn)\n f1 = 2.0 * precision * recall / (precision + recall)\n\n print(category)\n print(f' true pos = {tp}')\n print(f' true neg = {tn}')\n print(f' false pos = {fp}')\n print(f' false neg = {fn}')\n print(f' accuracy = {accuracy:.4f}')\n print(f' precision = {precision:.4f}')\n print(f' recall = {recall:.4f}')\n print(f' F_1 = {f1:.4f}')\n\n # LaTeX table content\n with open(f'output/All/latex_full.txt', 'a') as f:\n f.write('\\t\\t\\\\hline\\n')\n if tp > 0:\n f.write(f'\\t\\t{category} & {tp} & {tn} & {fp} & {fn} & {accuracy:.2f} & {precision:.2f} & {recall:.2f} & {f1:.2f} \\\\\\\\\\n')\n else:\n f.write(f'\\t\\t{category} & {tp} & {tn} & {fp} & {fn} & {accuracy:.2f} & 0 & 0 & 0 \\\\\\\\\\n')\n with open(f'output/All/latex_{category}.txt', 'w') as f:\n if tp > 0:\n f.write(f'\\t\\tOur Multi-Model & {accuracy:.4f} & {precision:.4f} & {recall:.4f} & {f1:.4f} \\\\\\\\\\n')\n else:\n f.write(f'\\t\\tOur Multi-Model & {accuracy:.4f} & 0 & 0 & 0 \\\\\\\\\\n')\n\n plot_hist(history, 'All', categorical=True)",
"def train(self, final_training=False):\n # initialize the model\n self.model = Word2Vec(\n min_count=3, # consider a merchant if merchant is present more than this threshold\n window=self.window,\n vector_size=self.embed_size,\n alpha=0.01, # learning rate\n min_alpha=0.001, # minimum learning rate\n negative=20, # number of random negative sampling\n )\n # build vocab\n corpus = self.prepare_corpus(final_training)\n self.model.build_vocab(corpus)\n\n # training\n self.model.train(corpus, total_examples=self.model.corpus_count, epochs=50)\n\n # init sims (Precompute L2-normalized embeddings)\n self.model.init_sims(replace=True)",
"def train_model(args, train_exs: List[SentimentExample]) -> SentimentClassifier:\n indexer = Indexer()\n stop_words = set(stopwords.words('english'))\n punkt = (',', '.', '...', '?', '\\'', '\\'\\'', '!', ':', ';')\n # Initialize feature extractor\n if args.model == \"TRIVIAL\":\n feat_extractor = None\n elif args.feats == \"UNIGRAM\":\n # Generate vocabulary\n for ex in train_exs:\n for word in ex.words:\n if word.lower() not in stop_words and word.lower() not in punkt:\n indexer.add_and_get_index(word.lower())\n feat_extractor = UnigramFeatureExtractor(indexer)\n elif args.feats == \"BIGRAM\":\n # Generate vocabulary\n for ex in train_exs:\n for i in range(0, len(ex.words) - 1):\n if stop_words.__contains__(ex.words[i]) and stop_words.__contains__(ex.words[i + 1]) or (\n punkt.__contains__(ex.words[i]) or punkt.__contains__(ex.words[i + 1])):\n continue\n bigram = ex.words[i] + ' ' + ex.words[i + 1]\n indexer.add_and_get_index(bigram.lower())\n feat_extractor = BigramFeatureExtractor(indexer)\n elif args.feats == \"BETTER\":\n # Generate vocabulary\n cnt = Counter()\n for ex in train_exs:\n cnt.update(\n word.lower() for word in ex.words if word.lower() not in stop_words and word.lower() not in punkt)\n cnt = dict(cnt.most_common(int(cnt.__len__() * 0.75)))\n for keys in cnt.keys():\n indexer.add_and_get_index(keys)\n feat_extractor = BetterFeatureExtractor(indexer)\n else:\n raise Exception(\"Pass in UNIGRAM, BIGRAM, or BETTER to run the appropriate system\")\n\n # Train the model\n if args.model == \"TRIVIAL\":\n model = TrivialSentimentClassifier()\n elif args.model == \"PERCEPTRON\":\n model = train_perceptron(train_exs, feat_extractor)\n elif args.model == \"LR\":\n model = train_logistic_regression(train_exs, feat_extractor)\n else:\n raise Exception(\"Pass in TRIVIAL, PERCEPTRON, or LR to run the appropriate system\")\n return model",
"def train():\n counts = {size: dict() for size in NGRAM_SIZES}\n for word in tqdm.tqdm(word_iterator(\"resources/datasets\")):\n if word == \"\":\n continue\n for size in NGRAM_SIZES:\n for token in ngrams(word, 2 * size):\n left, right = token[:size], token[size:]\n counts[size].setdefault(left, dict())\n counts[size][left].setdefault(right, 0)\n counts[size][left][right] += 1\n model = {size: dict() for size in NGRAM_SIZES}\n for size in NGRAM_SIZES:\n for left in counts[size]:\n total = sum(counts[size][left].values())\n model[size][left] = dict()\n for right in counts[size][left]:\n model[size][left][right] = math.log(\n counts[size][left][right] / total)\n with open(MODEL_FILENAME, \"wb\") as file:\n pickle.dump(model, file)",
"def train(self, trainfile):\r\n\r\n # We load the data and lower the text\r\n data_train = pd.read_csv(trainfile, sep = \"\\t\", names = [\"polarity\", \"category\", \"word\", \"offsets\", \"sentence\"])\r\n data_train['sentence_l'] = data_train['sentence'].apply(str.lower)\r\n data_train['word'] = data_train['word'].apply(str.lower)\r\n \r\n # We try to keep all the no/nor/not words as this changes radically the sentiment analysis\r\n data_train['sentence_l'] = data_train[\"sentence_l\"].apply(lambda sentence: sentence.replace(\"can\\'t\", \"can not\"))\r\n data_train['sentence_l'] = data_train[\"sentence_l\"].apply(lambda sentence: sentence.replace(\"n\\'t\", \" not\"))\r\n self.stopwords = stopwords.words(\"english\")\r\n self.stopwords.remove('nor')\r\n self.stopwords.remove('no')\r\n self.stopwords.remove('not')\r\n \r\n # We clean the train data and stem the words\r\n self.stemmer = nltk.porter.PorterStemmer()\r\n clean_sentences = []\r\n for row in data_train['sentence_l']:\r\n tokens = word_tokenize(row)\r\n tokens = [word for word in tokens if word.isalpha()]\r\n tokens = [w for w in tokens if not w in self.stopwords] \r\n tokens = [self.stemmer.stem(word) for word in tokens]\r\n clean_sentences.append(tokens)\r\n data_train['stems'] = clean_sentences\r\n \r\n # We also stem the target words to be coherent with the stemmed words in the sentences\r\n data_train['word'] = [self.stemmer.stem(word) for word in data_train['word']]\r\n \r\n # We recreate the sentences with the selected and cleaned words\r\n Classifier.create_sentence = staticmethod(Classifier.create_sentence)\r\n data_train.clean_sentence = Classifier.create_sentence(data_train.stems)\r\n \r\n # We create a BOW vector\r\n self.restaurant_vect = CountVectorizer(min_df=1, tokenizer=nltk.word_tokenize)\r\n reviews_counts = self.restaurant_vect.fit_transform(data_train.clean_sentence)\r\n \r\n # We transform the BOW vector with the tfidf scores\r\n self.tfidf_transformer = TfidfTransformer()\r\n reviews_tfidf = self.tfidf_transformer.fit_transform(reviews_counts)\r\n \r\n polarities = []\r\n for row in data_train['polarity']:\r\n if row == 'positive':\r\n polarities.append(1)\r\n if row == 'neutral':\r\n polarities.append(0)\r\n if row == 'negative':\r\n polarities.append(-1)\r\n data_train['polarity_floats'] = polarities\r\n \r\n # Split data into training and test sets\r\n test_size = 10\r\n X_train, X_test, y_train, y_test = train_test_split(reviews_tfidf, data_train.polarity_floats,\r\n test_size = test_size/100, random_state = None)\r\n \r\n ############# CNN MODEL ##############\r\n \r\n from keras.layers import Input, Dense, Embedding, Conv2D, MaxPool2D\r\n from keras.layers import Reshape, Flatten, Dropout, Concatenate\r\n from keras.callbacks import ModelCheckpoint\r\n from keras.optimizers import Adam\r\n from keras.models import Model\r\n \r\n sequence_length = X_train.shape[1] # 7\r\n vocabulary_size = X_train.shape[0] # 1503\r\n embedding_dim = 256\r\n filter_sizes = [3,4,5]\r\n num_filters = 512\r\n drop = 0.5\r\n \r\n epochs = 10\r\n batch_size = 50\r\n \r\n # this returns a tensor\r\n print(\"Creating Model...\")\r\n inputs = Input(shape=(sequence_length,), dtype='int32')\r\n embedding = Embedding(input_dim=vocabulary_size, output_dim=embedding_dim, input_length=sequence_length)(inputs)\r\n reshape = Reshape((sequence_length,embedding_dim,1))(embedding)\r\n \r\n conv_0 = Conv2D(num_filters, kernel_size=(filter_sizes[0], embedding_dim), padding='valid', kernel_initializer='normal', 
activation='relu')(reshape)\r\n conv_1 = Conv2D(num_filters, kernel_size=(filter_sizes[1], embedding_dim), padding='valid', kernel_initializer='normal', activation='relu')(reshape)\r\n conv_2 = Conv2D(num_filters, kernel_size=(filter_sizes[2], embedding_dim), padding='valid', kernel_initializer='normal', activation='relu')(reshape)\r\n \r\n maxpool_0 = MaxPool2D(pool_size=(sequence_length - filter_sizes[0] + 1, 1), strides=(1,1), padding='valid')(conv_0)\r\n maxpool_1 = MaxPool2D(pool_size=(sequence_length - filter_sizes[1] + 1, 1), strides=(1,1), padding='valid')(conv_1)\r\n maxpool_2 = MaxPool2D(pool_size=(sequence_length - filter_sizes[2] + 1, 1), strides=(1,1), padding='valid')(conv_2)\r\n \r\n concatenated_tensor = Concatenate(axis=1)([maxpool_0, maxpool_1, maxpool_2])\r\n flatten = Flatten()(concatenated_tensor)\r\n dropout = Dropout(drop)(flatten)\r\n output = Dense(units=1, activation='sigmoid')(dropout)\r\n \r\n # this creates a model that includes\r\n model = Model(inputs=inputs, outputs=output)\r\n \r\n checkpoint = ModelCheckpoint('weights.{epoch:03d}-{val_acc:.4f}.hdf5', monitor='val_acc', verbose=1, save_best_only=True, mode='auto')\r\n adam = Adam(lr=1e-4, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\r\n \r\n model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])\r\n print(\"Training Model...\")\r\n model.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, callbacks=[checkpoint], validation_data=(X_test, y_test)) # starts training\r",
"def train_model(schema,fieldsToRead = None):\n\tif not fieldsToRead:\n\t\tfieldsToRead = schema[\"fields\"].keys()\n\n\tif(\"vector_size\" in schema):\n\t\tvectorSize = schema[\"vector_size\"]\n\telse:\n\t\tvectorSize = DEFAULT_VECTOR_SIZE\n\n\tsentences = []\n\t# build sentences:\n\tprint \"Building Feature vectors...\"\n\n\tread_sentences(schema, lambda x : sentences.append(merge_sentences_to_single_sentence(x, fieldsToRead)))\n\tprint \"Read \" + str(len(sentences)) + \" documents\"\n\tprint \"Training Model...\"\n\tmodelPath = model_path(schema)\n\tweightMatrixPath = weight_matrix_path(schema)\n\tsentences = transpose_sentences(sentences)\n\tmodel = Word2Vec(sentences, size=vectorSize, window=5, min_count=1, workers=4)\n\tmodel.save(modelPath)\n\tmodel.save_word2vec_format(weightMatrixPath)\n\tprint \"Finished training\"\n\treturn model",
"def embedding_train(total_corpus,emoteonly_corpus,textonly_corpus,save_fname_emote,save_fname_text,save_fname_intersect):\n wv_model = Word2Vec(min_count=100,size=100,negative=0.75,sg=0,hs=1,window=60)\n wv_model.build_vocab(sentences=total_corpus())\n wv_model2 = copy.deepcopy(wv_model)\n \n # train emoteonly\n wv_model.train(sentences=emoteonly_corpus(),epochs=10,total_examples=wv_model.corpus_count)\n wv_model.save(save_fname_emote)\n # train_textonly\n wv_model2.train(sentences=textonly_corpus(),epochs=10,total_examples=wv_model.corpus_count)\n wv_model2.save(save_fname_text)\n \n src_model = Word2Vec.load(save_fname_emote)\n dest_model = Word2Vec.load(save_fname_text)\n \n src_model.wv.save_word2vec_format(save_fname_intersect)\n dest_model.intersect_word2vec_format(save_fname_intersect, lockf=1.0, binary=False)\n\n dest_model.train(sentences=train_corpus(), total_examples=dest_model.corpus_count, epochs=20)\n dest_model.save(save_fname_intersect)\n return",
"def train(self, corpus): \n # TODO your code here\n \n for sentence in corpus.corpus:\n for i,dotum in enumerate(sentence.data[1:]):\n self.vocab[dotum.word][sentence.data[i].word] +=1\n self.word_counts[sentence.data[i].word] +=1\n self.total +=1\n self.v = len(self.vocab.keys())",
"def _train_model(self):\n raise NotImplementedError()",
"def fasttext_wordvectors(corpus_path, model_path):\n model = fasttext.train_unsupervised(corpus_path)\n model.save_model(model_path)\n return model",
"def train(self,\n max_epochs = 10, # number of max possible training iterations\n min_count = 5, # min frequency of usage to enter vocab\n vec_size = 100, # size of feature vectors\n max_alpha = 0.025, # starting learning rate\n min_alpha = 0.00025, # lowest learning rate\n save_name = None):\n\n if not self.tagged_docs and not (self.paperdf and self.authordf):\n print('no data to train.')\n return\n\n self.model.epochs = max_epochs\n self.model.vocabulary.min_count = min_count\n self.model.vector_size = vec_size\n self.model.alpha = max_alpha\n self.model.min_alpha = min_alpha\n\n print('Training model.')\n print('Building Vocabulary.')\n self.model.build_vocab(self.tagged_docs)\n\n print('Training for', max_epochs, 'epochs.')\n self.epoch_logger = EpochLogger()\n self.model.train(self.tagged_docs, total_examples = self.model.corpus_count,\n epochs = self.model.epochs, callbacks = [self.epoch_logger])\n print(\"Finished in {} seconds.\".format(round(time.time() - self.epoch_logger.start_time, 3)))\n\n if save_name:\n filename = str(save_name) + '.model'\n self.model.save(filename)\n print(\"Model Saved as\", filename)\n\n # self._compute_util_data()",
"def build_model():\n \n #english trained optimized pipeline for word embedding\n nlp = spacy.load(\"en_core_web_md\") # this model will give you 300D\n \n pipeline = Pipeline([\n ('features', FeatureUnion([\n ('text_pipeline', Pipeline([\n ('vect', CountVectorizer(tokenizer=tokenize)),\n ('tfidf', TfidfTransformer()),\n ])),\n \n ('embeddings_pipeline', Pipeline([\n ('vect_trans',SpacyVectorTransformer(nlp)),\n ('reduce_dim', TruncatedSVD(50)),\n ])),\n \n ])),\n \n ('clf', MultiOutputClassifier(RandomForestClassifier()))\n ])\n \n parameters = {\n 'features__text_pipeline__vect__max_df': (0.5, 0.75, 1.0),\n 'features__embeddings_pipeline__reduce_dim__n_components':(50,60,70,100,120,130,150)\n }\n cv = GridSearchCV(pipeline, param_grid=parameters,cv=2)\n \n return cv",
"def train(self):\n\t\tself.model.fit(self.training_data, self.training_labels)",
"def build_model(self):\n doc_input = Input(shape=(self.max_sent_num ,self.max_sent_length,512), dtype='float32')\n doc_in=Flatten()(doc_input)\n \n #masked3=Masking(mask_value=Special_value)(doc_input)\n \n # self.model_sent = self.build_sent_encoder()\n \n # doc_encoder= TimeDistributed(self.model_sent)(doc_in)\n \n # document_att= self.build_doc_encoder(doc_encoder)\n dense= Dense(DENSE_SIZE,activation='softmax')(doc_in)\n #doc_att = self.build_sent_encoder(sent_encoder)\n # dense the output to 2 because the result is a binary classification.\n output_tensor = Dense(3, activation='softmax', name='classification')(dense)\n # Create Sentence-level Model\n self.model = Model(doc_input, output_tensor)",
"def normedmodel(corpus):\r\n if not os.path.isfile(NSAVE_NAME):\r\n if not os.path.isfile(SAVE_NAME):\r\n if not os.path.isfile(DP_NAME):\r\n generate_dps(corpus)\r\n generate_limittedmodel()\r\n generate_normedmodel()\r\n\r\n # Load the reduced word2vec model\r\n print('Loading model')\r\n model = KeyedVectors.load(NSAVE_NAME, mmap='r')\r\n print('Model loaded!')\r\n\r\n return model",
"def train_model(text, labels, max_depth):\n print('\\nTraining model...')\n cv = CountVectorizer(stop_words='english')\n tf = TfidfTransformer()\n rf = RandomForestClassifier(max_depth=max_depth)\n model = make_pipeline(cv, tf, rf)\n model.fit(text, labels)\n print('...and done!\\n')\n return model",
"def model_1(EMB_DIMS, filepath):\r\n \r\n [train_tweets, labels, test_tweets, nb_tokens, emb_matrix] = \\\r\n cPickle.load(open(os.path.join(filepath, \"train_test_{}embedding.pkl\".format(EMB_DIMS)), \"rb\"))\r\n\r\n np.random.seed(1)\r\n\r\n model = Sequential()\r\n model.add(Embedding(nb_tokens, EMB_DIMS, input_length=train_tweets.shape[1], weights=[emb_matrix]))\r\n model.add(Convolution1D(nb_filter=32, filter_length=3, border_mode='same', activation='relu'))\r\n model.add(MaxPooling1D(pool_length=2))\r\n model.add(Flatten())\r\n model.add(Dense(250, activation='relu'))\r\n model.add(Dense(1, activation='sigmoid'))\r\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\r\n print(model.summary())\r\n \r\n # Fit w/ 0.1 tr/te split\r\n model.fit(train_tweets, labels, validation_split=0.1, nb_epoch=2, batch_size=128, verbose=1)\r\n train = model.predict_proba(train_tweets, batch_size=128)\r\n test = model.predict_proba(test_tweets)\r\n\r\n # Saves the model and predictions\r\n save_model_predict(model, train, test, \"model1\")",
"def train(self, examples):\n \n for e in examples:\n\n class_label = None\n\n features = self.featurize(e[1])\n\n #determining which class corresponds to \n if(e[2] == '0'):\n class_label = self.class_zero\n self.class_zero_feature_count += len(features)\n self.class_zero_doc_count += 1\n else:\n class_label = self.class_one\n self.class_one_feature_count += len(features)\n self.class_one_doc_count += 1\n\n for f in features:\n if(f[1] == True):\n #adding feature to vocabulary\n self.vocab.add(f[0])\n #adding feature to class to keep track of counts\n class_label[f[0]] += 1\n \n\n self.total_docs = len(examples)",
"def training(self):\r\n self.model, self.voc = svm_clf_training('all', self.dataset)\r\n return 0",
"def get_fitted_model(dialog_acts, utterances):\n voc = []\n for u in utterances:\n for w in u.split():\n voc.append(w)\n\n voc = set(voc)\n vectorizer = CountVectorizer(vocabulary=voc)\n x = vectorizer.fit_transform(utterances).toarray()\n\n array(dialog_acts)\n\n label_encoder = LabelEncoder()\n one_hot_encoder = OneHotEncoder(sparse=False)\n\n integer_encoded = label_encoder.fit_transform(dialog_acts)\n integer_encoded = integer_encoded.reshape(len(integer_encoded), 1)\n y = one_hot_encoder.fit_transform(integer_encoded)\n\n x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)\n\n if not os.path.isfile(\"../TextClassification/model_binary_RFC.sav\"):\n with open(\"../TextClassification/model_binary_RFC.sav\", \"wb\") as model:\n classifier = RandomForestClassifier(n_estimators=500)\n classifier.fit(x_train, y_train)\n\n try:\n pickle.dump(classifier, model)\n model.close()\n\n except MemoryError:\n print(\"I failed dumping.\\n\\n\")\n model.close()\n\n else:\n with open(\"../TextClassification/model_binary_RFC.sav\", \"rb\") as training_model:\n classifier = pickle.load(training_model)\n training_model.close()\n\n\n return classifier, x, y, x_test, y_test, label_encoder.classes_, vectorizer",
"def create_model_simple(vocabulary_size, input_word_count, embedding_dims=50):\n model = Sequential()\n\n # we start off with an efficient embedding layer which maps\n # our vocab indices into embedding_dims dimensions\n model.add(Embedding(vocabulary_size, embedding_dims, input_length=input_word_count))\n\n # we add a GlobalAveragePooling1D, which will average the embeddings\n # of all words in the document\n model.add(GlobalAveragePooling1D())\n\n # We project onto a single unit output layer, and squash it with a sigmoid:\n model.add(Dense(1, activation=\"sigmoid\"))\n\n model.compile(loss=\"binary_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"])\n\n return model",
"def training(train_data, dev_data, param):\n text_to_vec = TextToVec(**param)\n\n # Fit with both train and dev data\n text_to_vec.fit(train_data['data'] + dev_data['data'])\n word_vec_map = text_to_vec.vectorizer.get_feature_names()\n train_vec = text_to_vec.transform(train_data['data'])\n dev_vec = text_to_vec.transform(dev_data['data'])\n logger.info(f\"train vec size:{train_vec.shape}, dev vec size:{dev_vec.shape}\")\n\n # # apply weights on tfidf based on whether the word appear in multiple classes\n # tt_occ = Counter(train_data['encoded_label'])\n # weight_list = []\n # for i in range(train_vec.shape[1]): # For every feature\n # occ = Counter(train_data['encoded_label'][train_vec[:, i] > 0.0])\n # for key, value in occ.items():\n # occ[key] = value/tt_occ[key]\n # weight_list.append(np.std(list(occ.values()))/0.35)\n # weight = np.array(weight_list).reshape(1, -1)\n # weight = weight/np.max(weight)\n # train_vec = np.multiply(train_vec, weight)\n\n # Perform oversampling on training data\n if param['balanced'] not in ['Bootstrap', 'Handsample']:\n logger.info(f\"class info before resampling: {sorted(Counter(train_data['encoded_label']).items())}\")\n train_vec, train_data['encoded_label'] = resample(X_train=train_vec, y_train=train_data['encoded_label'], balance=param['balanced'])\n logger.info(f\"class info after resampling:{sorted(Counter(train_data['encoded_label']).items())}\")\n\n # Fit model\n if param['classifier'] == 'MultinomialNB':\n clf = MultinomialNB()\n elif param['classifier'] == 'LDA':\n clf = LinearDiscriminantAnalysis()\n else:\n clf = svm.LinearSVC()\n\n if param['multiclass'] == 'OnevsOne':\n model = OneVsOneClassifier(clf)\n else:\n model = OneVsRestClassifier(clf)\n\n if param['classifier'] == 'LinearSVM' or param['multiclass'] == 'OnevsOne':\n logger.info(f'Fitting model: {param}')\n model = model.fit(train_vec, train_data['encoded_label'])\n train_prediction = model.predict(train_vec)\n dev_prediction = model.predict(dev_vec)\n else:\n logger.info(f'Fitting model: {param}')\n model = model.fit(train_vec, train_data['binary_label'])\n train_prediction = np.argmax(model.predict(train_vec), axis=1)\n dev_prediction = np.argmax(model.predict(dev_vec), axis=1)\n\n\n return train_prediction, dev_prediction, train_vec.shape, dev_vec.shape, model, word_vec_map",
"def train_model(X, y, model_type, ngram_type, label_type):\n assert(label_type in ['oh', 'ed'])\n assert(model_type in ['linear', 'mlp'])\n assert(ngram_type in ['word', 'char'])\n\n # tensorflow models aren't fork safe, which means they can't be served via uwsgi\n # as work around, we can serve a pure sklearn model\n # we should be able to find another fix\n\n if label_type == 'oh' and model_type == 'linear':\n\n y = np.argmax(y, axis = 1)\n\n clf = Pipeline([\n ('vect', CountVectorizer()),\n ('tfidf', TfidfTransformer()),\n ('clf', LogisticRegression()),\n ])\n\n params = {\n 'vect__max_features': 10000,\n 'vect__ngram_range': (1,2),\n 'vect__analyzer' : ngram_type,\n 'tfidf__sublinear_tf' : True,\n 'tfidf__norm' :'l2',\n 'clf__C' : 10,\n }\n else:\n if label_type == 'oh':\n y = one_hot(y)\n print(np.unique(y))\n\n clf = Pipeline([\n ('vect', CountVectorizer()),\n ('tfidf', TfidfTransformer()),\n ('to_dense', DenseTransformer()),\n ('clf', KerasClassifier(build_fn=make_mlp, output_dim = y.shape[1], verbose=False)),\n ])\n cv_results = pd.read_csv('cv_results.csv')\n query = \"model_type == '%s' and ngram_type == '%s' and label_type == '%s'\" % (model_type, ngram_type, label_type)\n params = cv_results.query(query)['best_params'].iloc[0]\n params = json.loads(params)\n print(\"parameters\", params)\n return clf.set_params(**params).fit(X,y)",
"def generate_limittedmodel():\r\n print('Loading model')\r\n model = KeyedVectors.load_word2vec_format(BIN_NAME, binary=True)\r\n print('Model loaded!')\r\n\r\n print('Loading dot products')\r\n dp = np.load(DP_NAME)\r\n print('Dot products loaded')\r\n\r\n print('Filtering vocab')\r\n for name, vocab in list(model.vocab.items()):\r\n if dp[vocab.index] < MAX_DEGREE:\r\n del model.vocab[name]\r\n\r\n il = list(model.vocab.items())\r\n print('Sorting vocab')\r\n il.sort(key=lambda x: x[1].index)\r\n\r\n # Find the indexes of the words that are being kept\r\n print('Generating indexes')\r\n indexes = []\r\n for i in range(0, len(il)):\r\n name, vocab = il[i]\r\n indexes.append(vocab.index)\r\n model.vocab[name].index = i\r\n\r\n print('Modifying model weights')\r\n model.syn0 = model.syn0[indexes]\r\n\r\n print('Saving file')\r\n model.save_word2vec_format(SAVE_NAME, binary=True)",
"def train_model(self, d=0.7):\n #eg: model = spammy.train_model()\n count_dict = self.freq_count()\n N = sum(count_dict.values())\n n_plus = len(count_dict)\n alpha = (d * n_plus) / N\n normalizer = alpha * (1/len(self._vocab_set))\n model = defaultdict(lambda:normalizer)\n \n for word in set(self._vocab_set):\n prob = normalizer\n if word in count_dict:\n prob += (count_dict[word] - d) / N\n prob = math.log(prob)\n model[word] = prob\n return model",
"def train_text_model(train_dir, num_steps):\n if tf.gfile.Exists(train_dir):\n # Delete old model\n tf.gfile.DeleteRecursively(train_dir)\n tf.gfile.MakeDirs(train_dir)\n\n with tf.Graph().as_default():\n model = TextModel(_CONFIG)\n # Specify the loss function:\n one_hot_labels = slim.one_hot_encoding(model.labels, model.nb_emotions)\n slim.losses.softmax_cross_entropy(model.logits, one_hot_labels)\n total_loss = slim.losses.get_total_loss()\n\n # Create some summaries to visualize the training process\n # Use tensorboard --logdir=train_dir, careful with path (add Documents/tumblr-sentiment in front of train_dir)\n # Different from the logs, because computed on different mini batch of data\n tf.summary.scalar('Loss', total_loss)\n \n # Specify the optimizer and create the train op:\n optimizer = tf.train.AdamOptimizer(learning_rate=model.learning_rate)\n train_op = slim.learning.create_train_op(total_loss, optimizer)\n\n batch_size = _CONFIG['batch_size']\n initial_lr = _CONFIG['initial_lr']\n decay_factor = _CONFIG['decay_factor']\n nb_batches = model.dataset.num_samples / batch_size\n def train_step_fn(session, *args, **kwargs):\n # Decaying learning rate every epoch\n if train_step_fn.step % (nb_batches) == 0:\n lr_decay = decay_factor ** train_step_fn.epoch\n session.run(model.lr_rate_assign, feed_dict={model.lr_rate_placeholder: initial_lr * lr_decay})\n print('New learning rate: {0}'. format(initial_lr * lr_decay))\n train_step_fn.epoch += 1\n\n # Initialise embedding weights\n if train_step_fn.step == 0:\n session.run(model.embedding_init, feed_dict={model.embedding_placeholder: model.embedding})\n total_loss, should_stop = train_step(session, *args, **kwargs)\n\n train_step_fn.step += 1\n return [total_loss, should_stop]\n \n train_step_fn.step = 0\n train_step_fn.epoch = 0\n\n # Run the training:\n final_loss = slim.learning.train(\n train_op,\n logdir=train_dir,\n save_interval_secs=600,\n save_summaries_secs=600,\n train_step_fn=train_step_fn,\n number_of_steps=num_steps)\n \n print('Finished training. Last batch loss {0:.3f}'.format(final_loss))",
"def learn_models(self):\n\n influencers = self.influencers.infGroup\n\n self.complete_model = LanguageModel()\n self.influencer_models = { influencer: LanguageModel() for influencer in influencers }\n\n all_tweets = []\n # for influencer in tqdm(influencers, desc='Learning Models'):\n for influencer in influencers:\n tweets = [tweet for tweet in self.get_saved_tweets(influencer)]\n self.influencer_models[influencer].add_documents(tweets)\n all_tweets += tweets\n\n self.complete_model.add_documents(all_tweets)",
"def model_extract_document_embedding(self):\n input_ids = tf.keras.layers.Input(shape=(self.maxlen,), dtype=tf.int32, name=\"ids\")\n attention_mask = tf.keras.layers.Input(shape=(self.maxlen,), dtype=tf.int32, name=\"att\")\n token = tf.keras.layers.Input(shape=(self.maxlen,), dtype=tf.int32, name=\"tok\")\n\n # Embedding :\n if self.method_embedding == 'CamemBERT':\n Camembert_model = transformers.TFCamembertModel.from_pretrained(\"jplu/tf-camembert-base\")\n x = Camembert_model(input_ids, attention_mask=attention_mask, token_type_ids=token)\n elif self.method_embedding == 'FlauBERT':\n # lr = 0.00001\n Flaubert_model = transformers.TFFlaubertModel.from_pretrained(\"jplu/tf-flaubert-base-uncased\")\n x = Flaubert_model(input_ids, attention_mask=attention_mask, token_type_ids=token)\n elif self.method_embedding == 'XLM-RoBERTa':\n # lr = 0.00001\n XLMRoBERTa_model = transformers.TFXLMRobertaModel.from_pretrained(\"jplu/tf-xlm-roberta-base\")\n x = XLMRoBERTa_model(input_ids, attention_mask=attention_mask, token_type_ids=token)\n elif self.method_embedding == 'RoBERTa':\n # Experience Test path weights :\n PATH = '/kaggle/input/tf-roberta/'\n config = transformers.RobertaConfig.from_pretrained(PATH + 'config-roberta-base.json')\n Roberta_model = transformers.TFRobertaModel.from_pretrained(PATH + 'pretrained-roberta-base.h5',\n config=config)\n # Sinon :\n # Roberta_model = transformers.TFRobertaModel.from_pretrained('roberta-base')\n x = Roberta_model(input_ids, attention_mask=attention_mask, token_type_ids=token)\n elif self.method_embedding == 'BERT':\n BERT_model = transformers.TFBertModel.from_pretrained('bert-base-uncased')\n x = BERT_model(input_ids, attention_mask=attention_mask, token_type_ids=token)\n else:\n logger.critical(\"unknown embedding method name : '{}'\".format(self.method_embedding))\n\n # word vectors shape : (None, maxlen, 768)\n x = x[0]\n cls_token = x[:, 0, :]\n\n model = tf.keras.models.Model(inputs=[input_ids, attention_mask, token], outputs=cls_token)\n return model",
"def __init__(self, vocabulary_size=1000):\n self.vocabulary_size = vocabulary_size",
"def train(fold_id = 0, verbose = False):\n df = pd.read_csv(config.INPUT_DIR / config.TRAIN_FILE)\n \n df[\"review\"] = df[\"review\"].apply(stem_sentence)\n\n df_train = df.loc[df[\"folds\"] != fold_id, :].reset_index(drop=True)\n df_validation = df.loc[df[\"folds\"] == fold_id, :].reset_index(drop=True)\n\n tfidf_vec = TfidfVectorizer(tokenizer=word_tokenize, token_pattern=None, ngram_range=(1,1))\n \n t0 = time()\n tfidf_vec.fit(df_train[\"review\"])\n joblib.dump(tfidf_vec, config.SAVED_MODELS_DIR / \"tfidf_vectorizer.pkl\")\n t1 = time()\n \n features_train = tfidf_vec.transform(df_train[\"review\"])\n features_validation = tfidf_vec.transform(df_validation[\"review\"])\n t2 = time()\n\n #clf = linear_model.LogisticRegression()\n clf = naive_bayes.MultinomialNB()\n clf.fit(features_train, df_train[\"sentiment\"])\n joblib.dump(clf, config.SAVED_MODELS_DIR / \"classifier_model.pkl\")\n t3 = time()\n\n preds = clf.predict(features_validation)\n acc = metrics.accuracy_score(df_validation[\"sentiment\"], preds)\n prec = metrics.precision_score(df_validation[\"sentiment\"], preds)\n\n print(f\"Accuracy: {np.round(acc,3)}\\t Precision: {np.round(prec, 3)}\")\n\n if verbose:\n print(f\"TfidfVectorizer fitted in {int(t1 - t0)} seconds.\")\n print(f\"TfidfVectorizer vocabulary length: {len(tfidf_vec.vocabulary_)}\")\n print(f\"Vectorizer transformed train and validation data in {int(t2 - t1)} seconds.\")\n print(f\"Model fitted in {int(t3 - t2)} seconds.\")",
"def train(\n # fmt: off\n lang: (\"Model language\", \"positional\", None, str),\n output_path: (\"Output directory to store model in\", \"positional\", None, Path),\n train_path: (\"Location of JSON-formatted training data\", \"positional\", None, Path),\n dev_path: (\"Location of JSON-formatted development data\", \"positional\", None, Path),\n raw_text: (\"Path to jsonl file with unlabelled text documents.\", \"option\", \"rt\", Path) = None,\n base_model: (\"Name of model to update (optional)\", \"option\", \"b\", str) = None,\n pipeline: (\"Comma-separated names of pipeline components\", \"option\", \"p\", str) = \"tagger,parser,ner\",\n vectors: (\"Model to load vectors from\", \"option\", \"v\", str) = None,\n replace_components: (\"Replace components from base model\", \"flag\", \"R\", bool) = False,\n n_iter: (\"Number of iterations\", \"option\", \"n\", int) = 30,\n n_early_stopping: (\"Maximum number of training epochs without dev accuracy improvement\", \"option\", \"ne\", int) = None,\n n_examples: (\"Number of examples\", \"option\", \"ns\", int) = 0,\n use_gpu: (\"Use GPU\", \"option\", \"g\", int) = -1,\n version: (\"Model version\", \"option\", \"V\", str) = \"0.0.0\",\n meta_path: (\"Optional path to meta.json to use as base.\", \"option\", \"m\", Path) = None,\n init_tok2vec: (\"Path to pretrained weights for the token-to-vector parts of the models. See 'spacy pretrain'. Experimental.\", \"option\", \"t2v\", Path) = None,\n parser_multitasks: (\"Side objectives for parser CNN, e.g. 'dep' or 'dep,tag'\", \"option\", \"pt\", str) = \"\",\n entity_multitasks: (\"Side objectives for NER CNN, e.g. 'dep' or 'dep,tag'\", \"option\", \"et\", str) = \"\",\n noise_level: (\"Amount of corruption for data augmentation\", \"option\", \"nl\", float) = 0.0,\n orth_variant_level: (\"Amount of orthography variation for data augmentation\", \"option\", \"ovl\", float) = 0.0,\n eval_beam_widths: (\"Beam widths to evaluate, e.g. 
4,8\", \"option\", \"bw\", str) = \"\",\n gold_preproc: (\"Use gold preprocessing\", \"flag\", \"G\", bool) = False,\n learn_tokens: (\"Make parser learn gold-standard tokenization\", \"flag\", \"T\", bool) = False,\n textcat_multilabel: (\"Textcat classes aren't mutually exclusive (multilabel)\", \"flag\", \"TML\", bool) = False,\n textcat_arch: (\"Textcat model architecture\", \"option\", \"ta\", str) = \"bow\",\n textcat_positive_label: (\"Textcat positive label for binary classes with two labels\", \"option\", \"tpl\", str) = None,\n tag_map_path: (\"Location of JSON-formatted tag map\", \"option\", \"tm\", Path) = None,\n verbose: (\"Display more information for debug\", \"flag\", \"VV\", bool) = False,\n debug: (\"Run data diagnostics before training\", \"flag\", \"D\", bool) = False,\n # fmt: on\n):\n util.fix_random_seed()\n util.set_env_log(verbose)\n\n # Make sure all files and paths exists if they are needed\n train_path = util.ensure_path(train_path)\n dev_path = util.ensure_path(dev_path)\n meta_path = util.ensure_path(meta_path)\n output_path = util.ensure_path(output_path)\n if raw_text is not None:\n raw_text = list(srsly.read_jsonl(raw_text))\n if not train_path or not train_path.exists():\n msg.fail(\"Training data not found\", train_path, exits=1)\n if not dev_path or not dev_path.exists():\n msg.fail(\"Development data not found\", dev_path, exits=1)\n if meta_path is not None and not meta_path.exists():\n msg.fail(\"Can't find model meta.json\", meta_path, exits=1)\n meta = srsly.read_json(meta_path) if meta_path else {}\n if output_path.exists() and [p for p in output_path.iterdir() if p.is_dir()]:\n msg.warn(\n \"Output directory is not empty\",\n \"This can lead to unintended side effects when saving the model. \"\n \"Please use an empty directory or a different path instead. If \"\n \"the specified output path doesn't exist, the directory will be \"\n \"created for you.\",\n )\n if not output_path.exists():\n output_path.mkdir()\n msg.good(f\"Created output directory: {output_path}\")\n\n tag_map = {}\n if tag_map_path is not None:\n tag_map = srsly.read_json(tag_map_path)\n # Take dropout and batch size as generators of values -- dropout\n # starts high and decays sharply, to force the optimizer to explore.\n # Batch size starts at 1 and grows, so that we make updates quickly\n # at the beginning of training.\n dropout_rates = util.decaying(\n util.env_opt(\"dropout_from\", 0.2),\n util.env_opt(\"dropout_to\", 0.2),\n util.env_opt(\"dropout_decay\", 0.0),\n )\n batch_sizes = util.compounding(\n util.env_opt(\"batch_from\", 100.0),\n util.env_opt(\"batch_to\", 1000.0),\n util.env_opt(\"batch_compound\", 1.001),\n )\n\n if not eval_beam_widths:\n eval_beam_widths = [1]\n else:\n eval_beam_widths = [int(bw) for bw in eval_beam_widths.split(\",\")]\n if 1 not in eval_beam_widths:\n eval_beam_widths.append(1)\n eval_beam_widths.sort()\n has_beam_widths = eval_beam_widths != [1]\n\n default_dir = Path(__file__).parent.parent / \"ml\" / \"models\" / \"defaults\"\n\n # Set up the base model and pipeline. If a base model is specified, load\n # the model and make sure the pipeline matches the pipeline setting. 
If\n # training starts from a blank model, intitalize the language class.\n pipeline = [p.strip() for p in pipeline.split(\",\")]\n msg.text(f\"Training pipeline: {pipeline}\")\n disabled_pipes = None\n pipes_added = False\n if use_gpu >= 0:\n activated_gpu = None\n try:\n activated_gpu = set_gpu(use_gpu)\n except Exception as e:\n msg.warn(f\"Exception: {e}\")\n if activated_gpu is not None:\n msg.text(f\"Using GPU: {use_gpu}\")\n else:\n msg.warn(f\"Unable to activate GPU: {use_gpu}\")\n msg.text(\"Using CPU only\")\n use_gpu = -1\n if base_model:\n msg.text(f\"Starting with base model '{base_model}'\")\n nlp = util.load_model(base_model)\n if nlp.lang != lang:\n msg.fail(\n f\"Model language ('{nlp.lang}') doesn't match language \"\n f\"specified as `lang` argument ('{lang}') \",\n exits=1,\n )\n if vectors:\n msg.text(f\"Loading vectors from model '{vectors}'\")\n _load_vectors(nlp, vectors)\n\n nlp.select_pipes(disable=[p for p in nlp.pipe_names if p not in pipeline])\n for pipe in pipeline:\n # first, create the model.\n # Bit of a hack after the refactor to get the vectors into a default config\n # use train-from-config instead :-)\n if pipe == \"parser\":\n config_loc = default_dir / \"parser_defaults.cfg\"\n elif pipe == \"tagger\":\n config_loc = default_dir / \"tagger_defaults.cfg\"\n elif pipe == \"ner\":\n config_loc = default_dir / \"ner_defaults.cfg\"\n elif pipe == \"textcat\":\n config_loc = default_dir / \"textcat_defaults.cfg\"\n elif pipe == \"senter\":\n config_loc = default_dir / \"senter_defaults.cfg\"\n else:\n raise ValueError(f\"Component {pipe} currently not supported.\")\n pipe_cfg = util.load_config(config_loc, create_objects=False)\n if vectors:\n pretrained_config = {\n \"@architectures\": \"spacy.VocabVectors.v1\",\n \"name\": vectors,\n }\n pipe_cfg[\"model\"][\"tok2vec\"][\"pretrained_vectors\"] = pretrained_config\n\n if pipe == \"parser\":\n pipe_cfg[\"learn_tokens\"] = learn_tokens\n elif pipe == \"textcat\":\n pipe_cfg[\"exclusive_classes\"] = not textcat_multilabel\n pipe_cfg[\"architecture\"] = textcat_arch\n pipe_cfg[\"positive_label\"] = textcat_positive_label\n\n if pipe not in nlp.pipe_names:\n msg.text(f\"Adding component to base model '{pipe}'\")\n nlp.add_pipe(nlp.create_pipe(pipe, config=pipe_cfg))\n pipes_added = True\n elif replace_components:\n msg.text(f\"Replacing component from base model '{pipe}'\")\n nlp.replace_pipe(pipe, nlp.create_pipe(pipe, config=pipe_cfg))\n pipes_added = True\n else:\n if pipe == \"textcat\":\n textcat_cfg = nlp.get_pipe(\"textcat\").cfg\n base_cfg = {\n \"exclusive_classes\": textcat_cfg[\"exclusive_classes\"],\n \"architecture\": textcat_cfg[\"architecture\"],\n \"positive_label\": textcat_cfg[\"positive_label\"],\n }\n if base_cfg != pipe_cfg:\n msg.fail(\n f\"The base textcat model configuration does\"\n f\"not match the provided training options. 
\"\n f\"Existing cfg: {base_cfg}, provided cfg: {pipe_cfg}\",\n exits=1,\n )\n msg.text(f\"Extending component from base model '{pipe}'\")\n disabled_pipes = nlp.select_pipes(\n disable=[p for p in nlp.pipe_names if p not in pipeline]\n )\n else:\n msg.text(f\"Starting with blank model '{lang}'\")\n lang_cls = util.get_lang_class(lang)\n nlp = lang_cls()\n\n if vectors:\n msg.text(f\"Loading vectors from model '{vectors}'\")\n _load_vectors(nlp, vectors)\n\n for pipe in pipeline:\n # first, create the model.\n # Bit of a hack after the refactor to get the vectors into a default config\n # use train-from-config instead :-)\n if pipe == \"parser\":\n config_loc = default_dir / \"parser_defaults.cfg\"\n elif pipe == \"tagger\":\n config_loc = default_dir / \"tagger_defaults.cfg\"\n elif pipe == \"morphologizer\":\n config_loc = default_dir / \"morphologizer_defaults.cfg\"\n elif pipe == \"ner\":\n config_loc = default_dir / \"ner_defaults.cfg\"\n elif pipe == \"textcat\":\n config_loc = default_dir / \"textcat_defaults.cfg\"\n elif pipe == \"senter\":\n config_loc = default_dir / \"senter_defaults.cfg\"\n else:\n raise ValueError(f\"Component {pipe} currently not supported.\")\n pipe_cfg = util.load_config(config_loc, create_objects=False)\n if vectors:\n pretrained_config = {\n \"@architectures\": \"spacy.VocabVectors.v1\",\n \"name\": vectors,\n }\n pipe_cfg[\"model\"][\"tok2vec\"][\"pretrained_vectors\"] = pretrained_config\n\n if pipe == \"parser\":\n pipe_cfg[\"learn_tokens\"] = learn_tokens\n elif pipe == \"textcat\":\n pipe_cfg[\"exclusive_classes\"] = not textcat_multilabel\n pipe_cfg[\"architecture\"] = textcat_arch\n pipe_cfg[\"positive_label\"] = textcat_positive_label\n\n pipe = nlp.create_pipe(pipe, config=pipe_cfg)\n nlp.add_pipe(pipe)\n\n # Update tag map with provided mapping\n nlp.vocab.morphology.tag_map.update(tag_map)\n\n # Multitask objectives\n multitask_options = [(\"parser\", parser_multitasks), (\"ner\", entity_multitasks)]\n for pipe_name, multitasks in multitask_options:\n if multitasks:\n if pipe_name not in pipeline:\n msg.fail(\n f\"Can't use multitask objective without '{pipe_name}' in \"\n f\"the pipeline\"\n )\n pipe = nlp.get_pipe(pipe_name)\n for objective in multitasks.split(\",\"):\n pipe.add_multitask_objective(objective)\n\n # Prepare training corpus\n msg.text(f\"Counting training words (limit={n_examples})\")\n corpus = GoldCorpus(train_path, dev_path, limit=n_examples)\n n_train_words = corpus.count_train()\n\n if base_model and not pipes_added:\n # Start with an existing model, use default optimizer\n optimizer = create_default_optimizer()\n else:\n # Start with a blank model, call begin_training\n cfg = {\"device\": use_gpu}\n optimizer = nlp.begin_training(lambda: corpus.train_examples, **cfg)\n nlp._optimizer = None\n\n # Load in pretrained weights (TODO: this may be broken in the config rewrite)\n if init_tok2vec is not None:\n components = _load_pretrained_tok2vec(nlp, init_tok2vec)\n msg.text(f\"Loaded pretrained tok2vec for: {components}\")\n\n # Verify textcat config\n if \"textcat\" in pipeline:\n textcat_labels = nlp.get_pipe(\"textcat\").cfg.get(\"labels\", [])\n if textcat_positive_label and textcat_positive_label not in textcat_labels:\n msg.fail(\n f\"The textcat_positive_label (tpl) '{textcat_positive_label}' \"\n f\"does not match any label in the training data.\",\n exits=1,\n )\n if textcat_positive_label and len(textcat_labels) != 2:\n msg.fail(\n \"A textcat_positive_label (tpl) '{textcat_positive_label}' was \"\n \"provided for 
training data that does not appear to be a \"\n \"binary classification problem with two labels.\",\n exits=1,\n )\n train_data = corpus.train_data(\n nlp,\n noise_level=noise_level,\n gold_preproc=gold_preproc,\n max_length=0,\n ignore_misaligned=True,\n )\n train_labels = set()\n if textcat_multilabel:\n multilabel_found = False\n for ex in train_data:\n train_labels.update(ex.gold.cats.keys())\n if list(ex.gold.cats.values()).count(1.0) != 1:\n multilabel_found = True\n if not multilabel_found and not base_model:\n msg.warn(\n \"The textcat training instances look like they have \"\n \"mutually-exclusive classes. Remove the flag \"\n \"'--textcat-multilabel' to train a classifier with \"\n \"mutually-exclusive classes.\"\n )\n if not textcat_multilabel:\n for ex in train_data:\n train_labels.update(ex.gold.cats.keys())\n if list(ex.gold.cats.values()).count(1.0) != 1 and not base_model:\n msg.warn(\n \"Some textcat training instances do not have exactly \"\n \"one positive label. Modifying training options to \"\n \"include the flag '--textcat-multilabel' for classes \"\n \"that are not mutually exclusive.\"\n )\n nlp.get_pipe(\"textcat\").cfg[\"exclusive_classes\"] = False\n textcat_multilabel = True\n break\n if base_model and set(textcat_labels) != train_labels:\n msg.fail(\n f\"Cannot extend textcat model using data with different \"\n f\"labels. Base model labels: {textcat_labels}, training data \"\n f\"labels: {list(train_labels)}\",\n exits=1,\n )\n if textcat_multilabel:\n msg.text(\n f\"Textcat evaluation score: ROC AUC score macro-averaged across \"\n f\"the labels '{', '.join(textcat_labels)}'\"\n )\n elif textcat_positive_label and len(textcat_labels) == 2:\n msg.text(\n f\"Textcat evaluation score: F1-score for the \"\n f\"label '{textcat_positive_label}'\"\n )\n elif len(textcat_labels) > 1:\n if len(textcat_labels) == 2:\n msg.warn(\n \"If the textcat component is a binary classifier with \"\n \"exclusive classes, provide '--textcat_positive_label' for \"\n \"an evaluation on the positive class.\"\n )\n msg.text(\n f\"Textcat evaluation score: F1-score macro-averaged across \"\n f\"the labels '{', '.join(textcat_labels)}'\"\n )\n else:\n msg.fail(\n \"Unsupported textcat configuration. 
Use `spacy debug-data` \"\n \"for more information.\"\n )\n\n # fmt: off\n row_head, output_stats = _configure_training_output(pipeline, use_gpu, has_beam_widths)\n row_widths = [len(w) for w in row_head]\n row_settings = {\"widths\": row_widths, \"aligns\": tuple([\"r\" for i in row_head]), \"spacing\": 2}\n # fmt: on\n print(\"\")\n msg.row(row_head, **row_settings)\n msg.row([\"-\" * width for width in row_settings[\"widths\"]], **row_settings)\n try:\n iter_since_best = 0\n best_score = 0.0\n for i in range(n_iter):\n train_data = corpus.train_dataset(\n nlp,\n noise_level=noise_level,\n orth_variant_level=orth_variant_level,\n gold_preproc=gold_preproc,\n max_length=0,\n ignore_misaligned=True,\n )\n if raw_text:\n random.shuffle(raw_text)\n raw_batches = util.minibatch(\n (nlp.make_doc(rt[\"text\"]) for rt in raw_text), size=8\n )\n words_seen = 0\n with tqdm.tqdm(total=n_train_words, leave=False) as pbar:\n losses = {}\n for batch in util.minibatch_by_words(train_data, size=batch_sizes):\n if not batch:\n continue\n try:\n nlp.update(\n batch,\n sgd=optimizer,\n drop=next(dropout_rates),\n losses=losses,\n )\n except ValueError as e:\n err = \"Error during training\"\n if init_tok2vec:\n err += \" Did you provide the same parameters during 'train' as during 'pretrain'?\"\n msg.fail(err, f\"Original error message: {e}\", exits=1)\n if raw_text:\n # If raw text is available, perform 'rehearsal' updates,\n # which use unlabelled data to reduce overfitting.\n raw_batch = list(next(raw_batches))\n nlp.rehearse(raw_batch, sgd=optimizer, losses=losses)\n docs = [ex.doc for ex in batch]\n if not int(os.environ.get(\"LOG_FRIENDLY\", 0)):\n pbar.update(sum(len(doc) for doc in docs))\n words_seen += sum(len(doc) for doc in docs)\n with nlp.use_params(optimizer.averages):\n util.set_env_log(False)\n epoch_model_path = output_path / f\"model{i}\"\n nlp.to_disk(epoch_model_path)\n nlp_loaded = util.load_model_from_path(epoch_model_path)\n for beam_width in eval_beam_widths:\n for name, component in nlp_loaded.pipeline:\n if hasattr(component, \"cfg\"):\n component.cfg[\"beam_width\"] = beam_width\n dev_dataset = list(\n corpus.dev_dataset(\n nlp_loaded,\n gold_preproc=gold_preproc,\n ignore_misaligned=True,\n )\n )\n nwords = sum(len(ex.doc) for ex in dev_dataset)\n start_time = timer()\n scorer = nlp_loaded.evaluate(dev_dataset, verbose=verbose)\n end_time = timer()\n if use_gpu < 0:\n gpu_wps = None\n cpu_wps = nwords / (end_time - start_time)\n else:\n gpu_wps = nwords / (end_time - start_time)\n with use_ops(\"numpy\"):\n nlp_loaded = util.load_model_from_path(epoch_model_path)\n for name, component in nlp_loaded.pipeline:\n if hasattr(component, \"cfg\"):\n component.cfg[\"beam_width\"] = beam_width\n dev_dataset = list(\n corpus.dev_dataset(\n nlp_loaded,\n gold_preproc=gold_preproc,\n ignore_misaligned=True,\n )\n )\n start_time = timer()\n scorer = nlp_loaded.evaluate(dev_dataset, verbose=verbose)\n end_time = timer()\n cpu_wps = nwords / (end_time - start_time)\n acc_loc = output_path / f\"model{i}\" / \"accuracy.json\"\n srsly.write_json(acc_loc, scorer.scores)\n\n # Update model meta.json\n meta[\"lang\"] = nlp.lang\n meta[\"pipeline\"] = nlp.pipe_names\n meta[\"spacy_version\"] = f\">={about.__version__}\"\n if beam_width == 1:\n meta[\"speed\"] = {\n \"nwords\": nwords,\n \"cpu\": cpu_wps,\n \"gpu\": gpu_wps,\n }\n meta.setdefault(\"accuracy\", {})\n for component in nlp.pipe_names:\n for metric in _get_metrics(component):\n meta[\"accuracy\"][metric] = scorer.scores[metric]\n 
else:\n meta.setdefault(\"beam_accuracy\", {})\n meta.setdefault(\"beam_speed\", {})\n for component in nlp.pipe_names:\n for metric in _get_metrics(component):\n meta[\"beam_accuracy\"][metric] = scorer.scores[metric]\n meta[\"beam_speed\"][beam_width] = {\n \"nwords\": nwords,\n \"cpu\": cpu_wps,\n \"gpu\": gpu_wps,\n }\n meta[\"vectors\"] = {\n \"width\": nlp.vocab.vectors_length,\n \"vectors\": len(nlp.vocab.vectors),\n \"keys\": nlp.vocab.vectors.n_keys,\n \"name\": nlp.vocab.vectors.name,\n }\n meta.setdefault(\"name\", f\"model{i}\")\n meta.setdefault(\"version\", version)\n meta[\"labels\"] = nlp.meta[\"labels\"]\n meta_loc = output_path / f\"model{i}\" / \"meta.json\"\n srsly.write_json(meta_loc, meta)\n util.set_env_log(verbose)\n\n progress = _get_progress(\n i,\n losses,\n scorer.scores,\n output_stats,\n beam_width=beam_width if has_beam_widths else None,\n cpu_wps=cpu_wps,\n gpu_wps=gpu_wps,\n )\n if i == 0 and \"textcat\" in pipeline:\n textcats_per_cat = scorer.scores.get(\"textcats_per_cat\", {})\n for cat, cat_score in textcats_per_cat.items():\n if cat_score.get(\"roc_auc_score\", 0) < 0:\n msg.warn(\n f\"Textcat ROC AUC score is undefined due to \"\n f\"only one value in label '{cat}'.\"\n )\n msg.row(progress, **row_settings)\n # Early stopping\n if n_early_stopping is not None:\n current_score = _score_for_model(meta)\n if current_score < best_score:\n iter_since_best += 1\n else:\n iter_since_best = 0\n best_score = current_score\n if iter_since_best >= n_early_stopping:\n msg.text(\n f\"Early stopping, best iteration is: {i - iter_since_best}\"\n )\n msg.text(\n f\"Best score = {best_score}; Final iteration score = {current_score}\"\n )\n break\n except Exception as e:\n msg.warn(f\"Aborting and saving final best model. Encountered exception: {e}\")\n finally:\n best_pipes = nlp.pipe_names\n if disabled_pipes:\n disabled_pipes.restore()\n with nlp.use_params(optimizer.averages):\n final_model_path = output_path / \"model-final\"\n nlp.to_disk(final_model_path)\n meta_loc = output_path / \"model-final\" / \"meta.json\"\n final_meta = srsly.read_json(meta_loc)\n final_meta.setdefault(\"accuracy\", {})\n final_meta[\"accuracy\"].update(meta.get(\"accuracy\", {}))\n final_meta.setdefault(\"speed\", {})\n final_meta[\"speed\"].setdefault(\"cpu\", None)\n final_meta[\"speed\"].setdefault(\"gpu\", None)\n meta.setdefault(\"speed\", {})\n meta[\"speed\"].setdefault(\"cpu\", None)\n meta[\"speed\"].setdefault(\"gpu\", None)\n # combine cpu and gpu speeds with the base model speeds\n if final_meta[\"speed\"][\"cpu\"] and meta[\"speed\"][\"cpu\"]:\n speed = _get_total_speed(\n [final_meta[\"speed\"][\"cpu\"], meta[\"speed\"][\"cpu\"]]\n )\n final_meta[\"speed\"][\"cpu\"] = speed\n if final_meta[\"speed\"][\"gpu\"] and meta[\"speed\"][\"gpu\"]:\n speed = _get_total_speed(\n [final_meta[\"speed\"][\"gpu\"], meta[\"speed\"][\"gpu\"]]\n )\n final_meta[\"speed\"][\"gpu\"] = speed\n # if there were no speeds to update, overwrite with meta\n if (\n final_meta[\"speed\"][\"cpu\"] is None\n and final_meta[\"speed\"][\"gpu\"] is None\n ):\n final_meta[\"speed\"].update(meta[\"speed\"])\n # note: beam speeds are not combined with the base model\n if has_beam_widths:\n final_meta.setdefault(\"beam_accuracy\", {})\n final_meta[\"beam_accuracy\"].update(meta.get(\"beam_accuracy\", {}))\n final_meta.setdefault(\"beam_speed\", {})\n final_meta[\"beam_speed\"].update(meta.get(\"beam_speed\", {}))\n srsly.write_json(meta_loc, final_meta)\n msg.good(\"Saved model to output directory\", 
final_model_path)\n with msg.loading(\"Creating best model...\"):\n best_model_path = _collate_best_model(final_meta, output_path, best_pipes)\n msg.good(\"Created best model\", best_model_path)",
"def loadmodels(): # type: () -> None\n\n global accsearch, unaccsearch, eulamodel\n\n accsearch = [row for row in helpers.accExamples if helpers.goodsize(row['Clause Text'])]\n accsearch = [addtoks(row) for row in accsearch]\n unaccsearch = [row for row in helpers.unaccExamples if helpers.goodsize(row['Clause Text'])]\n unaccsearch = [addtoks(row) for row in unaccsearch]\n modeldir = helpers.getmodelfolder()\n accargs = buildbertargs()\n accargs.output_dir = modeldir\n eulamodel = ClassificationModel('roberta', modeldir, args=accargs, weight=[2, 1], use_cuda=False)",
"def build_model_from_inputs(self):\n if self.term_list is None:\n # no supplied token list -- use vocabulary of the training dataset\n # self.term_list = self.vocabulary\n # info(\"Setting bag dimension to {} from input vocabulary.\".format(len(self.term_list)))\n # will generate the vocabulary from the input\n pass\n info(f\"Building {self.name} model\")\n bagger = None\n if self.config.max_terms is not None:\n bagger = Bag(vocabulary=self.term_list, weighting=self.base_name, ngram_range=self.ngram_range, max_terms=self.config.max_terms)\n else:\n bagger = Bag(vocabulary=self.term_list, weighting=self.base_name, ngram_range=self.ngram_range)\n\n train_idx = self.indices.get_train_instances()\n texts = Text.get_strings(self.text.data.get_slice(train_idx))\n bagger.map_collection(texts, fit=True, transform=False)\n self.term_list = bagger.get_vocabulary()\n\n self.dimension = len(self.term_list)\n self.config.dimension = self.dimension",
"def test():\n listpost,listclass = bayes.loaddataset()\n myvocablist = bayes.createlist(listpost)\n tmatrix = list()\n for doc in listpost:\n\t vec = bayes.word2vec(myvocablist,doc)\n\t tmatrix.append(vec)\n p0,p1,pa = bayes.train(tmatrix,listclass)\n testdoc1 = ['love','my','dalmation']\n testvec1 = bayes.word2vec(myvocablist,testdoc1)\n print testdoc1,'classify as :',bayes.classify(testvec1,p0,p1,pa)\n testdoc2 = ['stupid','love']\n testvec2 = bayes.word2vec(myvocablist,testdoc2)\n print testdoc2,'classify as :',bayes.classify(testvec2,p0,p1,pa)",
"def train_model_for_shap(allFeatures, train_ml, test_ml, df_ml, classification_model, language_model, fold):\n # list of analyzed language models\n model = classification_model\n print(type(model).__name__)\n\n features = set(allFeatures[language_model][fold])\n preds = []\n trues = []\n\n train_index = train_ml[fold]\n test_index = test_ml[fold]\n\n train_data = df_ml[features].iloc[train_index]\n target_train_data = df_ml[\"target_ml\"].iloc[train_index]\n test_data = df_ml[features].iloc[test_index]\n target_test_data = df_ml.iloc[test_index][\"target_ml\"]\n model.fit(train_data, target_train_data)\n\n preds.append(model.predict(test_data).tolist())\n trues.append(target_test_data.tolist())\n\n print(language_model)\n mcc = metrics.matthews_corrcoef(y_true=sum(trues, []), y_pred=sum(preds, []))\n f1 = metrics.f1_score(y_true=sum(trues, []), y_pred=sum(preds, []), average=\"weighted\")\n print(\"MCC: \", round(mcc, 3))\n print(\"F1: \", round(f1, 3))\n return model, train_data, test_data",
"def train_full_model(self, model_arch=\"dense_net_121\", pretrain=False) -> None:\n X_concat = np.concatenate((self.X_train, self.X_dev, self.X_test), axis=0)\n y_concat = np.concatenate((self.y_train, self.y_dev, self.y_test), axis=0)\n self.recognizer.set_model((self.img_size[1], self.img_size[0]), 0.3, model_arch)\n self.recognizer.model.compile(\n optimizer=keras.optimizers.Adam(),\n loss=keras.losses.SparseCategoricalCrossentropy(),\n metrics=[\"accuracy\"],\n )\n\n font_chars = list((self.pretrain_path / \"Alef\").iterdir())\n if len(font_chars) == 31 and pretrain:\n # print(self.recognizer.get_summary())\n print(\"Pretraining on font data.\")\n self.recognizer.model.fit(self.X_pretrain, self.y_pretrain) # pretraining\n\n print(\"Training on characters.\")\n self.recognizer.model.fit(\n X_concat,\n y_concat,\n epochs=6,\n )\n\n print(self.recognizer.get_summary())\n model_name = self.recognizer.get_model_name()\n self.recognizer.save_model(model_name)",
"def train(self, corpus): \n for sentence in corpus.corpus:\n prev_word = None\n for datum in sentence.data:\n word = datum.word\n self.unigram_count[word] += 1\n if prev_word != None:\n self.bigram_count[prev_word][word] += 1\n prev_word = word\n \n self.vocabulary_size = len(self.unigram_count)\n self.num_words = sum(self.unigram_count.values())",
"def __init__(\n self,\n vocabulary_sizes,\n max_length,\n _categorecal_features,\n num_ordinal_features,\n dense_nodes,\n pretrained_embeddings,\n quiet=False,\n ):\n\n # Note that the vocabulary size will have to accomm\n nodes_in_embedding_layer = [\n max(2, int(np.ceil(np.sqrt(np.sqrt(v))))) for v in vocabulary_sizes\n ]\n\n # Create embeddings for the categorical inputs\n embedding_inputs = []\n flat_embeddings = []\n models = []\n self.emb_names = [\n (c.replace(\" \", \"_\") + \"_embedding\") for c in _categorecal_features\n ]\n\n for i, vocab_size in enumerate(vocabulary_sizes):\n\n embedding_inputs.append(Input(shape=(max_length,)))\n if len(pretrained_embeddings) == 0:\n embedding_i = Embedding(\n vocab_size,\n nodes_in_embedding_layer[i],\n name=self.emb_names[i],\n input_length=max_length, # weights=[word_weight_matrix],\n trainable=True,\n )(embedding_inputs[i])\n else:\n embedding_i = Embedding(\n vocab_size,\n nodes_in_embedding_layer[i],\n name=self.emb_names[i],\n input_length=max_length,\n weights=[pretrained_embeddings[i]],\n trainable=False,\n )(embedding_inputs[i])\n\n flat_embeddings.append(Flatten()(embedding_i))\n models.append(Model(inputs=embedding_inputs[i], outputs=flat_embeddings[i]))\n\n # Merge embeddings with ordinal inputs\n ordinal_inputs = [Input(shape=(1,)) for i in range(num_ordinal_features)]\n concatenated = concatenate(flat_embeddings + ordinal_inputs)\n\n # Deep network after all inputs have been incorporated\n hidden_layers = [concatenated]\n for i in range(len(dense_nodes)):\n hidden_layer = Dense(dense_nodes[i], activation=\"relu\")(\n BatchNormalization()(hidden_layers[i])\n )\n hidden_layers.append(hidden_layer)\n\n output = Dense(1, activation=\"sigmoid\")(hidden_layers[-1])\n self.merged_model = Model(\n inputs=embedding_inputs + ordinal_inputs, outputs=output\n )\n\n # print(self.merged_model.summary())\n if not quiet:\n if len(pretrained_embeddings) == 0:\n plot_model(\n self.merged_model,\n to_file=\"train_embeddings.png\",\n show_shapes=True,\n show_layer_names=True,\n )\n else:\n plot_model(\n self.merged_model,\n to_file=\"pretrained_embeddings.png\",\n show_shapes=True,\n show_layer_names=True,\n )",
"def load_model():\n prepro = Prepro(PATH_STOPSWORD, PATH_ACRONYM)\n vectorizer = joblib.load(PATH_TFIDF)\n label_encoder = joblib.load(PATH_ENCODER)\n model_svm = joblib.load(PATH_SVM)\n model_nb = joblib.load(PATH_NB)\n model_lr = joblib.load(PATH_LR)\n return prepro, vectorizer, label_encoder, model_svm, model_nb, model_lr",
"def vocabulary(self, config=Config()):\n raise NotImplementedError(\"Class %s doesn't implement vocabulary()\" % self.__class__.__name__)",
"def build_model(self, documents):\n self.vectorizer = TfidfVectorizer(\n stop_words='english', lowercase=True).fit(documents)\n self.vectors = self.vectorizer.transform(documents)",
"def test_online_learning(self):\n model = PoincareModel(self.data, burn_in=0, negative=3)\n self.assertEqual(len(model.kv.vocab), 7)\n self.assertEqual(model.kv.vocab['kangaroo.n.01'].count, 3)\n self.assertEqual(model.kv.vocab['cat.n.01'].count, 1)\n model.build_vocab([('kangaroo.n.01', 'cat.n.01')], update=True) # update vocab\n self.assertEqual(model.kv.vocab['kangaroo.n.01'].count, 4)\n self.assertEqual(model.kv.vocab['cat.n.01'].count, 2)",
"def train(corpus, iterations=100) :\n \n # Model vocabulary\n source_vocabulary = set()\n for (target_words, source_words) in corpus:\n source_vocabulary = source_vocabulary.union(set(source_words))\n \n # Initialize the probabilities of every arrangement by a uniform value\n default_probability = 1 / len(source_vocabulary)\n probabilities = collections.defaultdict(lambda: default_probability)\n \n # Initialize model\n model = collections.defaultdict(collections.defaultdict)\n \n for i in range(iterations):\n # Normalized total\n normalize_total = collections.defaultdict(lambda: 0.0)\n # Arrangement total\n arrangement_total = collections.defaultdict(lambda: 0.0)\n # Source total\n source_total = collections.defaultdict(lambda: 0.0)\n \n for (target_words, source_words) in corpus:\n # Calculate normalization factor\n for target_word in target_words:\n normalize_total[target_word] = 0.0\n for source_word in source_words:\n normalize_total[target_word] += probabilities[(target_word, source_word)]\n \n # Calculate totals\n for target_word in target_words:\n for source_word in source_words:\n total = probabilities[(target_word, source_word)] / normalize_total[target_word]\n arrangement_total[(target_word, source_word)] += total\n source_total[source_word] += total\n \n # Calculate probability\n for (target_word, source_word) in arrangement_total.keys():\n probabilities[(target_word, source_word)] = arrangement_total[(target_word, source_word)] / source_total[source_word]\n \n # Convert model to a dictionary\n for target_word, source_word in probabilities:\n model[source_word][target_word] = probabilities[(target_word, source_word)]\n \n return model",
"def tokenize(self):\n\n x = [] # input documents\n for file_path in glob.glob(self.train_dir + '*.txt'):\n file_as_string = open(file_path).read()\n x.append(file_as_string)\n\n self.tokenizer.fit_on_texts(x)\n print('input vocabulary size:', len(self.tokenizer.word_index))\n pickle_file = open('Model/tokenizer.p', 'wb')\n pickle.dump(self.tokenizer, pickle_file)",
"def trainingModel4wmd(corpus):\n model = Word2Vec(corpus, workers = nCores, size = 100, window = 300,\n min_count = 2, iter = 250)\n # model = Word2Vec(corpus)\n\n # use the following if we want to normalize the vectors\n model.init_sims(replace=True)\n\n return model",
"def load_target_vocab(self):\n vocab = [line.split()[0] for line in open(os.path.join('preprocessed', 'all_vocab.txt'), 'r').read().splitlines()]\n self.word2idx = {word: idx for idx, word in enumerate(vocab)}\n self.idx2word = {idx: word for idx, word in enumerate(vocab)}\n self.vocab_size = len(self.word2idx)",
"def train_model(self):\n self.best_epoch = {'auto':{}, 'coffee':{}, 'movie':{}, 'pizza':{}, 'restaurant':{}, 'uber':{} }\n self.best_f1 = {'auto':{}, 'coffee':{}, 'movie':{}, 'pizza':{}, 'restaurant':{}, 'uber':{} }\n for t in self.topic:\n if t != 'other':\n for st in self.topic2sub_topic[t].keys():\n\n print(\"Now training the classsfier for topic: \", t, \" ; intent: \", st)\n print(128 * \"=\")\n print(\"Input: str; Output: boolean(if the str contents the intent: \", st, \" ).\")\n print(64 * \"-\")\n X, y = self.get_data(t, st)\n print(\"data_loaded!\")\n X_train, X_dev, y_train, y_dev = self.my_train_test_split(X, y)\n best_f1 = 0\n for e in range(1,10):\n model = tf.keras.Sequential()\n model.add(tf.keras.layers.InputLayer(input_shape=[1024, ]))\n model.add(tf.keras.layers.Dense(64, activation='relu'))\n model.add(tf.keras.layers.Dense(64, activation='relu'))\n model.add(tf.keras.layers.Dense(1, activation='relu'))\n model.compile(loss='mean_squared_logarithmic_error', optimizer='adam', metrics=[metrics.mae, metrics.categorical_accuracy])\n model.fit(X_train, y_train, epochs=e, batch_size=128)\n print(\"f1_score on dev set: \")\n f1 = self.f1_score_model(model, X_dev, y_dev)[0]\n if f1 > best_f1:\n self.model_zoo[t][st] = model\n model.save_weights(self.trained_w_folder+\"/%s/%s.h5\" %(t,st))\n self.best_epoch[t][st] = e\n self.best_f1[t][st] = f1\n best_f1 = f1\n\n print(64*\"=\")\n print()",
"def single_category(category, epochs=50):\n import models\n import tensorflow as tf\n from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, TensorBoard\n\n with corpus.get_conn() as conn:\n posts, label_vectors = corpus.get_training(conn)\n\n preprocessed = np.array(models.preprocess(posts))\n del posts\n print(f'preprocessed.shape = {preprocessed.shape}')\n\n category_index = corpus.categories[category]\n labels = np.array(label_vectors[:,category_index])\n del label_vectors\n print(f'labels.shape = {labels.shape}')\n\n # shuffle data and labels with same permutation\n # because model.fit() will take validation data from the end and shuffle afterwards\n permutation = np.random.permutation(preprocessed.shape[0])\n preprocessed = preprocessed[permutation]\n labels = labels[permutation]\n\n val_split = 0.15\n val_count = int(np.round(preprocessed.shape[0] * val_split))\n print(f'val_count = {val_count}')\n print(f'train labels mean = {np.mean(labels[:-val_count], axis=0)}')\n print(f'val labels mean = {np.mean(labels[-val_count:], axis=0)}')\n\n model = models.classifier()\n\n callbacks = [\n ReduceLROnPlateau(),\n EarlyStopping(patience=4),\n ModelCheckpoint(filepath=f'output/{category}/model.h5', save_best_only=True),\n TensorBoard(log_dir=os.path.join('logs', 'fit', datetime.now().strftime('%Y%m%d-%H%M%S')))\n ]\n\n history = model.fit(preprocessed, labels, callbacks=callbacks, epochs=epochs, verbose=2, validation_split=val_split, batch_size=64)\n # model.save(f'output/{category}/model.h5') not necessary when ModelCheckpoint callback used\n # print(history.history)\n\n val_labels = labels[-val_count:]\n val_predict = (model.predict(preprocessed[-val_count:]) > 0.5) * 1 # turn predictions into integers\n val_predict = val_predict.reshape(val_labels.shape)\n eq = val_labels == val_predict\n neq = val_labels != val_predict\n\n tp = np.sum(eq[val_predict == 1])\n tn = np.sum(eq[val_predict == 0])\n fp = np.sum(neq[val_predict == 1])\n fn = np.sum(neq[val_predict == 0])\n accuracy = (tp + tn) / val_labels.shape[0]\n precision = tp / (tp + fp)\n recall = tp / (tp + fn)\n f1 = 2.0 * precision * recall / (precision + recall)\n\n print('final validation results:')\n print(f'true pos = {tp}')\n print(f'true neg = {tn}')\n print(f'false pos = {fp}')\n print(f'false neg = {fn}')\n print(f'confusion matrix = {tf.math.confusion_matrix(labels[-val_count:], val_predict).numpy().tolist()}')\n # compute manually to check history values\n print(f'accuracy = {accuracy:.4f}')\n print(f'precision = {precision:.4f}')\n print(f'recall = {recall:.4f}')\n print(f'F_1 = {f1:.4f}')\n\n # LaTeX table content\n with open(f'output/{category}/latex_full.txt', 'w') as f:\n if tp > 0:\n f.write(f'\\t\\t{category} & {tp} & {tn} & {fp} & {fn} & {accuracy:.2f} & {precision:.2f} & {recall:.2f} & {f1:.2f} \\\\\\\\\\n')\n else:\n f.write(f'\\t\\t{category} & {tp} & {tn} & {fp} & {fn} & {accuracy:.2f} & 0 & 0 & 0 \\\\\\\\\\n')\n with open(f'output/{category}/latex_{category}.txt', 'w') as f:\n if tp > 0:\n f.write(f'\\t\\tOur Single-Model & {accuracy:.4f} & {precision:.4f} & {recall:.4f} & {f1:.4f} \\\\\\\\\\n')\n else:\n f.write(f'\\t\\tOur Single-Model & {accuracy:.4f} & 0 & 0 & 0 \\\\\\\\\\n')\n\n plot_hist(history, category)",
"def train_model(self, *args, **kwargs):\n raise NotImplementedError",
"def main_strategy_2():\n en_text, de_text, train_iter, dev_iter, _ = clean_data_strategy_2()\n embedding_en, embedding_de = get_GloVe_embedding(en_text, de_text)\n model = Model(len(en_text.vocab), len(de_text.vocab), 300, embedding_en, embedding_de)\n train(model, train_iter, dev_iter)",
"def load_model(base_model, id2label=id2label, label2id=label2id, max_length=128):\n print(f\"Loading model {base_model}\")\n model = AutoModelForSequenceClassification.from_pretrained(\n base_model, return_dict=True, num_labels=len(id2label)\n )\n\n tokenizer = AutoTokenizer.from_pretrained(base_model)\n tokenizer.model_max_length = max_length\n\n #model.config.hidden_dropout_prob = 0.20\n model.config.id2label = id2label\n model.config.label2id = label2id\n\n if base_model not in dont_add_tokens:\n vocab = tokenizer.get_vocab()\n new_tokens_to_add = [tok for tok in special_tokens if tok not in tokenizer.get_vocab()]\n\n if new_tokens_to_add:\n \"\"\"\n TODO: Perdoname Wilkinson, te he fallado\n\n Hay una interfaz diferente acá, no entiendo bien por qué\n \"\"\"\n if hasattr(tokenizer, \"is_fast\") and tokenizer.is_fast:\n tokenizer.add_special_tokens({'additional_special_tokens': new_tokens_to_add})\n else:\n tokenizer.add_special_tokens(new_tokens_to_add)\n model.resize_token_embeddings(len(tokenizer))\n return model, tokenizer",
"def train_model(self):\n if not self.is_exist(self.path_model_directory):\n # Then create the parent folder\n os.makedirs(self.path_model_directory)\n\n # Create a meta-data pickle for the model\n self.create_meta_data_pickle()\n\n # Necessary meta-data file must be created before starting the training. Check if the file exists\n if self.is_exist(self.path_model_metadata):\n\n # We do not need to train a model if there is already a best model for the same training exist\n try:\n self.model = load_model(self.path_best_model)\n return\n except:\n self.log_event('There is no best trained model found in the parent folder. Going with the training...')\n\n # Load the model meta-data\n self.load_model_metadata()\n self.encoding_vector_size = self.number_of_distinct_items\n\n # Iterate trough the split data for the training\n for split_number in range(self.k_split):\n split_path = f'split_{str(split_number)}/'\n split_directory = self.path_model_directory + split_path\n\n # Check the split directory is already created. If it is, then we can directly start the training by using the existing data\n if self.is_exist(split_directory):\n try:\n self.load_best_tuned_model(split_number)\n except (IndexError, FileNotFoundError):\n self.load_fold_k_data_and_fit(split_number=int(split_number))\n\n else:\n # Create a folder for the split data and prepare the data for the training\n os.makedirs(split_directory)\n\n # Create an array which will contain train features-labels and test features-labels\n train_array = np.full(4, fill_value=self.mask_value, dtype=object)\n train_index = 0\n for position, split_name in enumerate(['train_split_', 'test_split_']):\n training_features_directory = split_directory + f'{split_name}{str(split_number)}_all_training_features.data'\n training_targets_directory = split_directory + f'{split_name}{str(split_number)}_all_training_targets.data'\n fold_directory = self.path_shared_folds + f'{split_name}{str(split_number)}.fold'\n\n self.process_training_data(fold_directory=fold_directory)\n\n self.save_data_to_disk(data_to_save=self.all_features, path_to_save=training_features_directory)\n train_array[train_index] = self.all_features\n train_index += 1\n self.all_features = None # Memory Management\n\n self.save_data_to_disk(data_to_save=self.all_targets, path_to_save=training_targets_directory)\n train_array[train_index] = self.all_targets\n train_index += 1\n self.all_targets = None # Memory Management\n\n # Assign the input data to respective variables for the training\n self.train_features = train_array[0]\n self.train_targets = train_array[1]\n self.test_features = train_array[2]\n self.test_targets = train_array[3]\n del train_array\n\n self.start_hyper_parameter_tuning(split_number)\n\n self.retrieve_best_model(metric=self.hyper_parameters['metric'])",
"def train_model(self,train_data,eval_data=None): # noqa: ignore flake8\"\n logger.info(\"Training model...\")\n os.makedirs(self.model_dir, exist_ok=True)\n source_texts, target_texts = create_dataset(train_data)\n\n self.src_2_ids = read_vocab(source_texts)\n self.trg_2_ids = read_vocab(target_texts)\n save_word_dict(self.src_2_ids, self.src_vocab_path)\n save_word_dict(self.trg_2_ids, self.trg_vocab_path)\n train_src, train_trg = one_hot(source_texts, target_texts, self.src_2_ids, self.trg_2_ids, sort_by_len=True)\n\n id_2_srcs = {v: k for k, v in self.src_2_ids.items()}\n id_2_trgs = {v: k for k, v in self.trg_2_ids.items()}\n logger.debug(f'train src: {[id_2_srcs[i] for i in train_src[0]]}')\n logger.debug(f'train trg: {[id_2_trgs[i] for i in train_trg[0]]}')\n\n self.model = Seq2Seq(\n encoder_vocab_size=len(self.src_2_ids),\n decoder_vocab_size=len(self.trg_2_ids),\n embed_size=self.embed_size,\n enc_hidden_size=self.hidden_size,\n dec_hidden_size=self.hidden_size,\n dropout=self.dropout\n )\n self.model.to(device)\n logger.debug(self.model)\n optimizer = torch.optim.Adam(self.model.parameters())\n\n train_data = gen_examples(train_src, train_trg, self.batch_size, self.max_length)\n train_losses = []\n best_loss = 1e3\n for epoch in range(self.epochs):\n self.model.train()\n total_num_words = 0.\n total_loss = 0.\n for it, (mb_x, mb_x_len, mb_y, mb_y_len) in enumerate(train_data):\n mb_x = torch.from_numpy(mb_x).to(device).long()\n mb_x_len = torch.from_numpy(mb_x_len).to(device).long()\n mb_input = torch.from_numpy(mb_y[:, :-1]).to(device).long()\n mb_output = torch.from_numpy(mb_y[:, 1:]).to(device).long()\n mb_y_len = torch.from_numpy(mb_y_len - 1).to(device).long()\n mb_y_len[mb_y_len <= 0] = 1\n\n mb_pred, attn = self.model(mb_x, mb_x_len, mb_input, mb_y_len)\n\n mb_out_mask = torch.arange(mb_y_len.max().item(), device=device)[None, :] < mb_y_len[:, None]\n mb_out_mask = mb_out_mask.float()\n\n loss = self.loss_fn(mb_pred, mb_output, mb_out_mask)\n\n num_words = torch.sum(mb_y_len).item()\n total_loss += loss.item() * num_words\n total_num_words += num_words\n\n # update optimizer\n optimizer.zero_grad()\n loss.backward()\n torch.nn.utils.clip_grad_norm_(self.model.parameters(), 5.)\n optimizer.step()\n\n if it % 100 == 0:\n logger.debug(\"Epoch :{}/{}, iteration :{}/{} loss:{:.4f}\".format(epoch, self.epochs,\n it, len(train_data),\n loss.item()))\n cur_loss = total_loss / total_num_words\n train_losses.append(cur_loss)\n logger.debug(\"Epoch :{}/{}, Training loss:{:.4f}\".format(epoch, self.epochs, cur_loss))\n if epoch % 1 == 0:\n # find best model\n is_best = cur_loss < best_loss\n best_loss = min(cur_loss, best_loss)\n if is_best:\n self.save_model()\n logger.info('Epoch:{}, save new bert model:{}'.format(epoch, self.model_path))\n if eval_data:\n self.eval_model(eval_data)\n\n\n return train_losses",
"def load_data(dataset_path, word2vec_model_path, n_class=2, max_seq_len_cutoff=50):\n\n dataset_file = open(dataset_path, \"r\", encoding='utf-8')\n dataset_content = dataset_file.readlines()\n\n x_text = []\n y = []\n for element in dataset_content:\n element = element.lower()\n element = element.split(\"\\t\")\n label = int(element[0])\n text = element[1].strip()\n if (len(text) == 0):\n continue\n x_text.append(text)\n tmp_lable = np.zeros(n_class)\n if(n_class == 2):\n tmp_lable[label] = 1\n else:\n tmp_lable[label - 1] = 1\n y.append(tmp_lable)\n\n\n x_text = clean_str(x_text, max_seq_len_cutoff)\n\n sequence_length = max(len(x) for x in x_text)\n\n vocabulary, vocabulary_inv = build_vocab(x_text)\n y = np.asarray(y)\n\n word2vec_Model = Load_Model(word2vec_model_path)\n word2vec_vocab = word2vec_Model.vocab\n word2vec_vec = word2vec_Model.syn0\n\n print(\"word2vec len is: \", len(word2vec_vec))\n tmp = word2vec_vocab['real']\n tmp1 = copy.deepcopy(tmp)\n word_vector = np.random.uniform(low=-0.25, high=0.25, size=(1,word2vec_vec.shape[1]))\n word2vec_vec = np.append(word2vec_vec, word_vector, axis=0)\n tmp1.index = len(word2vec_vec)-1\n word2vec_vocab['<un_known>'] = tmp1\n\n return [x_text, y, sequence_length, vocabulary, vocabulary_inv, word2vec_vocab, word2vec_vec]",
"def train_model(self, text, labels):\n clf = svm.SVR()\n count_vect = CountVectorizer()\n tfidf_transformer = TfidfTransformer()\n counts = count_vect.fit_transform(text)\n tfidf = tfidf_transformer.fit_transform(counts)\n clf.fit(tfidf, labels)\n\n return clf, count_vect, tfidf_transformer",
"def build_vocab(self, sentences, keep_raw_vocab=False, trim_rule=None, progress_per=10000, update=False):\n print(\"build------------------\")\n self.scan_vocab(sentences, progress_per=progress_per, trim_rule=trim_rule) # initial survey\n # trim by min_count & precalculate downsampling\n self.scale_vocab(trim_rule=trim_rule, update=update)\n self.finalize_vocab(update=update)",
"def prepare(self):\n # get data from file\n train_data, test_data = return_speechacts()\n # y are the speechacts or 'labels'\n y_train = [t.split(' ')[0] for t in train_data]\n y_test = [t.split(' ')[0] for t in test_data]\n # x are the sentences\n x_train = [\" \".join(t.split(' ')[1:]) for t in train_data]\n x_test = [\" \".join(t.split(' ')[1:]) for t in test_data]\n # use the tokenizer and padding from keras to assign arrays of integers\n # to sentences, out of vocabulary token is 1\n self.tokenizer_x = Tokenizer(oov_token=1)\n self.tokenizer_x.fit_on_texts(x_train + x_test)\n xt_train = self.tokenizer_x.texts_to_sequences(x_train)\n xt_train = pad_sequences(xt_train, maxlen=self.sentence_size,\n dtype='int32')\n xt_test = self.tokenizer_x.texts_to_sequences(x_test)\n xt_test = pad_sequences(xt_test, maxlen=self.sentence_size,\n dtype='int32')\n # vocab is the number of words in our vocabulary\n self.vocab = len(self.tokenizer_x.word_index) + 1\n # do the same for labels\n self.tokenizer_y = Tokenizer()\n self.tokenizer_y.fit_on_texts(y_train + y_test)\n yt_train = self.tokenizer_y.texts_to_sequences(y_train)\n yt_train = [t[0] for t in yt_train]\n yt_train = to_categorical(yt_train)\n yt_test = self.tokenizer_y.texts_to_sequences(y_test)\n yt_test = [t[0] for t in yt_test]\n yt_test = to_categorical(yt_test)\n self.x_train = x_train\n self.y_train = y_train\n self.x_test = x_test\n self.y_test = y_test\n self.xt_train = xt_train\n self.yt_train = yt_train\n self.xt_test = xt_test\n self.yt_test = yt_test",
"def createModel(self, X_train, y_train):\n total_words = len(self.tokenizer.word_index) + 1\n # Create model and layers\n model = Sequential()\n model.add(Embedding(total_words, 100, input_length=self.max_sequence_len-1))\n model.add(Bidirectional(LSTM(150)))\n model.add(Dense(total_words, activation=\"softmax\"))\n # Compile model\n model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01), metrics=['accuracy'])\n # Fit model to training data\n fitting = model.fit(X_train, y_train, epochs=100, verbose=1, callbacks=[self.callback])\n return model",
"def train(self, corpus):\n self.tokens = []\n self.tags = []\n sentences = corpus.split(NEW_LINE)\n for sentence in sentences:\n start = START_SIGHT + SLASH + START_SIGHT + SPACE + START_SIGHT + SLASH + START_SIGHT + SPACE\n end = SPACE + END + SLASH + END\n sentence = start + sentence + end \n tokens = sentence.split(SPACE)\n for t in tokens:\n token = t.rsplit(SLASH, 1)\n if (len(token) > 1):\n self.tokens.append(token) \n self.tags.append(token[TAG_INDEX])\n \n nonsense_cases = set([(END, START_SIGHT), (START_SIGHT, END),\n (START_SIGHT, START_SIGHT, END),\n (END, START_SIGHT, START_SIGHT)])\n self.bigram_tags = [b for b in zip(self.tags[:-1], self.tags[1:]) if b not in nonsense_cases]\n self.trigram_tags = [t for t in zip(self.tags[:-1], self.tags[1:], self.tags[2:])\\\n if not (t[WORD_INDEX], t[TAG_INDEX]) in nonsense_cases and\\\n not (t[WORD_INDEX], t[TAG_INDEX]) in nonsense_cases]",
"def __init__(self):\n self.sentiment_map = {0: \"negative\", 1: \"positive\"}\n self.tokenizer = AutoTokenizer.from_pretrained(\"MilaNLProc/feel-it-italian-sentiment\")\n self.model = AutoModelForSequenceClassification.from_pretrained(\"MilaNLProc/feel-it-italian-sentiment\")\n self.model.eval()\n self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')",
"def predict(cls, input):\n clf = cls.get_model() \n\n input.to_csv(data_dir + 'vdok_predction_src_file.csv')\n\n q = qa_serializer_lang_selector(data_dir)\n q.serialize_record('vdok_predction_src_file.csv', task_name)\n q.select_lang([1], task_name).to_csv(data_dir + data_file, encoding= 'latin1')\n\n pipeline=['pos', 'lemma', 'synset', 'hype', 'hypo']\n\n bnlqd = fex_basic_nlp(data_file, data_dir)\n bnlqd.nlp_run(pipeline[0])\n bnlqd.nlp_run(pipeline[1])\n bnlqd.df_ac_lemma.to_csv(data_dir + 'Lemma-' + data_file, encoding= 'latin1')\n bnlqd.nlp_run(pipeline[2])\n bnlqd.df_ac_synset.to_csv(data_dir + 'Synset-' + data_file , encoding= 'latin1')\n bnlqd.nlp_run(pipeline[3])\n bnlqd.df_ac_hypernyms.to_csv(data_dir + 'Hypernyms-' + data_file, encoding= 'latin1')\n bnlqd.nlp_run(pipeline[4])\n bnlqd.df_ac_hyponyms.to_csv(data_dir + 'Hyponyms-' + data_file, encoding= 'latin1')\n\n bnlpd = fex_basic_nlp(def_file, data_dir, task_name)\n bnlpd.nlp_run(pipeline[0])\n bnlpd.nlp_run(pipeline[1])\n bnlpd.df_ac_lemma.to_csv(data_dir + 'Lemma-P-' + data_file, encoding= 'latin1')\n \n btgqd = bi_trigram(data_file, data_dir)\n btgqd.nlp_run(r'bigram')\n btgqd.nlp_run(r'trigram') \n\n stop_words_d = cls.remove_non_extracted_stop_word(bnlqd.df_ac_lemma, stop_words)\n\n oanc_shelve = oanc_resource + 'ANC-all-lemma-04262014.db'\n oalqd = odi_oanc_lemma_frequency(data_file, oanc_shelve, None, data_dir, stop_words_d) \n oalqd.oanc_lemma_frequency('Lemma-' + data_file, 'Student_Question_Index', 'Pre_Col_Name')\n \n stop_words_hy_d = cls.remove_non_extracted_stop_word(bnlqd.df_ac_lemma, stop_words_hy)\n\n ovlqd = odi_overlapping(data_file, def_file, data_dir, stop_words_d)\n ovlqd.count_overlapping('Lemma-' + data_file, 'Student_Question_Index',\n 'Pre_Col_Name', 'Question_ID', 'Question_ID_Sec',\n 'Lemma-P-' + data_file, 'Question_ID', 'Question_ID_Sec')\n ovlqd.count_overlapping_synset('Synset-' + data_file)\n ovlqd.count_overlapping_hypernyms('Hypernyms-' + data_file, stop_words_hy_d)\n ovlqd.count_overlapping_hyponyms('Hyponyms-' + data_file, stop_words_hy_d)\n\n df_ac_pmi_dist_bigram = cls.bi_trigram_pmi_distribution(pmi_bigram_file, data_dir, \n bnlqd.num_clm_in, btgqd.df_ac_bigram, 'bigram')\n df_ac_pmi_dist_trigram = cls.bi_trigram_pmi_distribution(pmi_trigram_file, data_dir, \n bnlqd.num_clm_in, btgqd.df_ac_trigram, 'Trigram')\n\n df_ac_aggregate = cls.aggregate_plim(bnlqd, oalqd, ovlqd, df_ac_pmi_dist_bigram, df_ac_pmi_dist_trigram,\n bnlpd, specific_count_lemmas, stop_words_pos, task_name)\n df_ac_aggregate.to_csv(data_dir + 'vdok_predction_Aggregate_plim.csv', encoding= 'latin1')\n df_ac_aggregate_item_level = cls.aggregate_item_level_plim(df_ac_aggregate, oalqd.stem_option_name_clm, \n task_name)\n df_ac_aggregate_item_level.to_csv(data_dir + 'vdok_predction_Key_Stem_Passage_Aggregate_plim.csv',\n encoding= 'latin1')\n\n rfrpod = tmv_RF_classify('Independent_Variable_w_Label-Def.csv', data_dir)\n rfrpod.load_data('vdok_predction_Key_Stem_Passage_Aggregate_plim.csv', True, drop_vars, dependent_var)\n clf.perform_prediction(rfrpod.df_ac_modeling_values)\n return clf.df_ac_classified",
"def predict():\n\n predict_cfg = get_predict_args()\n device = get_device()\n print(device)\n\n # load checkpoint\n ckpt_path = find_ckpt_in_directory(predict_cfg.ckpt)\n ckpt = torch.load(ckpt_path, map_location=device)\n best_iter = ckpt[\"best_iter\"]\n cfg = ckpt[\"cfg\"]\n aspect = cfg[\"aspect\"]\n\n for k, v in cfg.items():\n print(\"{:20} : {:10}\".format(k, str(v)))\n\n eval_batch_size = 64\n\n print(\"Loading data\")\n dev_data = list(beer_reader(cfg[\"dev_path\"]))\n test_data = beer_annotations_reader(cfg[\"test_path\"], aspect=aspect)\n\n print(\"dev\", len(dev_data))\n print(\"test\", len(test_data))\n\n print(\"Loading pre-trained word embeddings\")\n vocab = Vocabulary()\n vectors = load_embeddings(cfg[\"embeddings\"], vocab) # required for vocab\n\n # build model\n model = build_model(cfg[\"model\"], vocab, cfg=cfg)\n\n # load parameters from checkpoint into model\n print(\"Loading saved model..\")\n model.load_state_dict(ckpt[\"state_dict\"])\n model.to(device)\n print(\"Done\")\n\n print(model)\n print_parameters(model)\n\n print(\"Evaluating\")\n dev_eval = evaluate_loss(\n model, dev_data, batch_size=eval_batch_size,\n device=device, cfg=cfg)\n test_eval = evaluate_loss(\n model, test_data, batch_size=eval_batch_size,\n device=device, cfg=cfg)\n\n if hasattr(model, \"z\"):\n path = os.path.join(\n cfg[\"save_path\"], \"final_rationales.txt\")\n test_precision, test_macro_prec = evaluate_rationale(\n model, test_data, aspect=aspect, device=device,\n batch_size=eval_batch_size, path=path)\n else:\n test_precision = 0.\n test_macro_prec = 0.\n test_eval[\"precision\"] = test_precision\n test_eval[\"macro_precision\"] = test_macro_prec\n\n dev_s = make_kv_string(dev_eval)\n test_s = make_kv_string(test_eval)\n\n print(\"best model iter {:d} dev {} test {}\".format(\n best_iter, dev_s, test_s))",
"def test_text_classifier_train(self):\n pass",
"def train_model(\n fname: Path,\n save_name: Path,\n batch_size: int = 32,\n warmup_steps: int = 100,\n steps: int = 1000,\n num_heads: int = 2,\n model_dim: int = 128,\n key_dim: int = 128,\n value_dim: int = 128,\n dropout: float = 0.1,\n num_mask: int = 9,\n):\n seqs = read_fasta(fname)\n\n X = seqs_to_integer(seqs)\n\n X = torch.from_numpy(X).type(torch.LongTensor)\n\n X_train, X_test = random_split(X)\n\n train_params = {\n \"batch_size\": batch_size,\n \"lr\": 0.0005,\n \"weight_decay\": 0.0,\n \"warmup_steps\": warmup_steps,\n \"steps\": steps,\n }\n model_params[\"n_head\"] = num_heads\n model_params[\"d_model\"] = model_dim\n model_params[\"d_k\"] = key_dim\n model_params[\"d_v\"] = value_dim\n model_params[\"dropout\"] = dropout\n model_params[\"num_mask\"] = num_mask\n\n model = BERT(**model_params)\n\n optimizer = Adam(\n model.parameters(),\n lr=train_params[\"lr\"],\n weight_decay=train_params[\"weight_decay\"],\n )\n\n scheduler = WarmupAnnealLR(optimizer, warmup_steps=train_params[\"warmup_steps\"])\n\n train(\n model,\n X_train,\n X_test,\n save_name,\n batch_size=train_params[\"batch_size\"],\n optimizer=optimizer,\n scheduler=scheduler,\n steps=train_params[\"steps\"],\n pbar_increment=10,\n )",
"def train():\n pass",
"def train(self, examples):\n print(examples)\n # first we will do gensim to get word embeddings\n tokens = []\n for example in examples:\n for tuple in example:\n tokens.append([tuple[0]])\n self.model = Word2Vec(tokens, min_count=1, size=100).wv\n # shuffle the examples so that they are gone through 'randomly'\n #print(examples)\n random.shuffle(examples)\n #print(examples)\n # iterate through our examples\n for j in range(len(examples)):\n # the stored label for the previous token\n prev_label = None\n prev_word = None\n # iterate through our tokens for the example\n for i in range(len(examples[j])):\n # store our token and its label\n token = examples[j][i][0]\n y = examples[j][i][1]\n # get the features for our current token\n next_word = None\n if i <= (len(examples)-1):\n next_word = examples[j][i+1][0]\n features = self.featurize(prev_label, prev_word, token, next_word)\n # set our previous label to our current since\n # we are done featurizing and need to store it for\n # the next iteration\n prev_label = y\n # a dictionary that will store our z values\n z = {}\n # calculate our z value for every state for\n # the example we are on\n # z(state) = features * weights\n # z[state] = np.dot(features, weights[state])\n for state in self.states:\n z[state] = np.dot(features, self.weights[state])\n # store our max\n max = -1\n # store our y_hat\n y_hat = None\n # store our probabilities\n prob = {}\n # this runs softmax on our z's\n # y_hat = softmax(z)\n denom = sum(np.exp(np.array(list(z.values()))))\n for state in self.states:\n # softmax = p(state) = e^z[state] / (sum[e^z for all z's)\n # making sure this works the way I want it to, should\n # be three values\n #print(np.array(list(z.values())))\n #print(np.exp(np.array(list(z.values()))))\n prob[state] = np.exp(z[state]) / denom\n # if our current prob is greater than the others then it is our boy\n if prob[state] > max:\n # save the new prob as the max\n max = prob[state]\n # save the state as our prediction y_hat\n y_hat = state\n # this will hold our gradients for all the states\n gradients = {}\n for state in self.states:\n # gradient[state] = ((y_hat == state) - prob[state]) * features\n gradients[state] = ((y_hat == state) - prob[state]) * features\n # weights[state] -= loss * gradients\n self.weights[state] -= self.loss * gradients[state]",
"def train(\n model_path=\"./trained_model/\",\n model_file_name=\"model.h5\",\n training_data_path=\"./train.csv\",\n):\n config = SConfig(training_data_path=training_data_path)\n s2s = Seq2Seq(config)\n s2s.fit()\n s2s.save_model(path_to_model=model_path, model_file_name=model_file_name)",
"def skipgram(init,\n load,\n sg_model_path,\n sg_model_name,\n save_kv,\n sg_model_config,\n train,\n epochs,\n similarity,\n accuracy):\n\n # allows display info\n logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)\n\n # define some path variable to clean the code\n path_to_model_dir = os.path.join(sg_model_path, sg_model_name)\n path_to_model_file = os.path.join(path_to_model_dir, sg_model_name + \".model\")\n path_to_keyed_vectors_file = os.path.join(path_to_model_dir, sg_model_name + \".kv\")\n\n # use a memory-friendly iterator\n sentences = MyReviews(nb_reviews=NB_REVIEWS)\n\n if init and not load:\n # sentences / corpus = None so the model is left uninitialized\n # iter = 1 to make sure to have an uninitialized model\n # sample = The threshold for configuring which higher-frequency words are randomly downsampled, useful range is (0, 1e-5).\n model = Word2Vec(sentences=sentences,\n sg=1,\n iter=1,\n size=sg_model_config[\"size\"],\n window=sg_model_config[\"window\"],\n sample=sg_model_config[\"sample\"],\n min_count=sg_model_config[\"min_count\"],\n hs=sg_model_config[\"hs\"],\n negative=sg_model_config[\"negative\"],\n workers=sg_model_config[\"workers\"])\n\n # save the model after initialization\n model.save(path_to_model_file)\n\n elif load:\n # load the model\n model = Word2Vec.load(path_to_model_file)\n\n else:\n # the user is informed that he has to choise init or load arguments\n raise RuntimeError(\"You have either to choose parameter -init or -load\")\n\n if train:\n # train the model\n model.train(sentences=sentences,\n total_examples=model.corpus_count,\n epochs=epochs)\n\n # always save the model after training\n model.save(path_to_model_file)\n\n if save_kv:\n # save vectors representation of words\n model.wv.save(path_to_keyed_vectors_file)\n\n if similarity != \"\":\n # evaluate the model by similarity search for one word\n print(\"Words similar to \", similarity)\n print(model.most_similar(positive=[similarity]))\n\n if accuracy:\n model.wv.accuracy(questions=PATH_TO_QUESTIONS_WORDS_FILE)",
"def load_model(self):\n for t in self.topic:\n if t != \"other\":\n print(\"Loading models of topic: \", t)\n for st in self.topic2sub_topic[t].keys():\n model = tf.keras.Sequential()\n model.add(tf.keras.layers.InputLayer(input_shape=[1024, ]))\n model.add(tf.keras.layers.Dense(64, activation='relu'))\n model.add(tf.keras.layers.Dense(64, activation='relu'))\n model.add(tf.keras.layers.Dense(1, activation='relu'))\n model.compile(loss='mean_squared_logarithmic_error', optimizer='adam', metrics=[metrics.mae, metrics.categorical_accuracy])\n\n if not os.path.exists(self.trained_w_folder+\"/%s/%s.h5\" %(t,st)):\n print(\"Now training the classsfier for topic: \", t, \" ; intent: \", st)\n print(64 * \"=\")\n X, y = self.get_data(t, st)\n print(\"data_loaded!\")\n X_train, X_dev, y_train, y_dev = self.my_train_test_split(X, y)\n model.fit(X_train, y_train, epochs=3, batch_size=128)\n model.save_weights(self.trained_w_folder+\"/%s/%s.h5\" %(t,st))\n print(\"f1_score on dev set: \")\n self.f1_score_model(model, X_dev, y_dev)\n print(64*\"=\")\n print()\n else:\n model.load_weights(self.trained_w_folder+\"/%s/%s.h5\" %(t,st))\n print(\"Loaded weights for model \" + t + \" : \" + st)\n self.model_zoo[t][st] = model",
"def __init__(self, corpus):\n self.train(corpus)",
"def train(self, model_type, params=None):\n Model = load_model_class(model_type)\n self.model_type = model_type\n X, y = self.task.make_dataset()\n self.final_data = X.copy()\n # Save preds\n preds = np.zeros_like(y.values).astype(np.float)\n with TMPFolder():\n N = len(X)\n n = N // self.cv\n # Assign a fold to each sample\n folds = np.random.permutation(np.repeat(np.arange(self.cv), n+1)[:N])\n if self.cv == 1:\n folds[:] = 1\n folds[np.random.permutation(np.arange(N))[:int(round(0.25 * N))]] = 0\n # Iterate over folds\n for k in range(self.cv):\n print(\"Fold\", k)\n # Create model\n model = Model()\n if params is not None:\n model.set_hp(params)\n # Create sub-dataset\n X_train = X[folds != k]\n y_train = y[folds != k]\n X_test = X[folds == k]\n y_test = y[folds == k]\n # Train the model\n model.train(X_train, y_train)\n # Make predictions on test samples\n y_pred = model.predict(X_test)\n # Save the predictions\n preds[folds == k] = y_pred\n self.model_save.append(model)\n # Save folds\n self.folds = folds\n self.is_trained = True\n self.preds = preds\n self.true_labels = y",
"def model(features, test_features, encoding='ohe', n_folds=5):\n\n # Extract the ids\n train_ids = features['SK_ID_CURR']\n test_ids = test_features['SK_ID_CURR']\n\n # Extract the labels for training\n labels = features['TARGET']\n\n # Remove the ids and target\n features = features.drop(columns=['SK_ID_CURR', 'TARGET'])\n test_features = test_features.drop(columns=['SK_ID_CURR'])\n\n # One Hot Encoding\n if encoding == 'ohe':\n features = pd.get_dummies(features)\n test_features = pd.get_dummies(test_features)\n\n # Align the dataframes by the columns\n features, test_features = features.align(test_features, join='inner', axis=1)\n\n # No categorical indices to record\n cat_indices = 'auto'\n\n # Integer label encoding\n elif encoding == 'le':\n\n # Create a label encoder\n label_encoder = LabelEncoder()\n\n # List for storing categorical indices\n cat_indices = []\n\n # Iterate through each column\n for i, col in enumerate(features):\n if features[col].dtype == 'object':\n # Map the categorical features to integers\n features[col] = label_encoder.fit_transform(np.array(features[col].astype(str)).reshape((-1,)))\n test_features[col] = label_encoder.transform(np.array(test_features[col].astype(str)).reshape((-1,)))\n\n # Record the categorical indices\n cat_indices.append(i)\n\n # Catch error if label encoding scheme is not valid\n else:\n raise ValueError(\"Encoding must be either 'ohe' or 'le'\")\n\n print('Training Data Shape: ', features.shape)\n print('Testing Data Shape: ', test_features.shape)\n\n # Extract feature names\n feature_names = list(features.columns)\n\n # Convert to np arrays\n features = np.array(features)\n test_features = np.array(test_features)\n\n # Create the kfold object\n k_fold = KFold(n_splits=n_folds, shuffle=True, random_state=50)\n\n # Empty array for feature importances\n feature_importance_values = np.zeros(len(feature_names))\n\n # Empty array for test predictions\n test_predictions = np.zeros(test_features.shape[0])\n\n # Empty array for out of fold validation predictions\n out_of_fold = np.zeros(features.shape[0])\n\n # Lists for recording validation and training scores\n valid_scores = []\n train_scores = []\n\n # Iterate through each fold\n for train_indices, valid_indices in k_fold.split(features):\n # Training data for the fold\n train_features, train_labels = features[train_indices], labels[train_indices]\n # Validation data for the fold\n valid_features, valid_labels = features[valid_indices], labels[valid_indices]\n\n # Create the model\n model = lgb.LGBMClassifier(n_estimators=10000, objective='binary',\n class_weight='balanced', learning_rate=0.05,\n reg_alpha=0.1, reg_lambda=0.1,\n subsample=0.8, n_jobs=-1, random_state=50)\n\n # Train the model\n model.fit(train_features, train_labels, eval_metric='auc',\n eval_set=[(valid_features, valid_labels), (train_features, train_labels)],\n eval_names=['valid', 'train'], categorical_feature=cat_indices,\n early_stopping_rounds=100, verbose=200)\n\n # Record the best iteration\n best_iteration = model.best_iteration_\n\n # Record the feature importances\n feature_importance_values += model.feature_importances_ / k_fold.n_splits\n\n # Make predictions\n test_predictions += model.predict_proba(test_features, num_iteration=best_iteration)[:, 1] / k_fold.n_splits\n\n # Record the out of fold predictions\n out_of_fold[valid_indices] = model.predict_proba(valid_features, num_iteration=best_iteration)[:, 1]\n\n # Record the best score\n valid_score = model.best_score_['valid']['auc']\n train_score = 
model.best_score_['train']['auc']\n\n valid_scores.append(valid_score)\n train_scores.append(train_score)\n\n # Clean up memory\n gc.enable()\n del model, train_features, valid_features\n gc.collect()\n\n # Make the submission dataframe\n submission = pd.DataFrame({'SK_ID_CURR': test_ids, 'TARGET': test_predictions})\n\n # Make the feature importance dataframe\n feature_importances = pd.DataFrame({'feature': feature_names, 'importance': feature_importance_values})\n\n # Overall validation score\n valid_auc = roc_auc_score(labels, out_of_fold)\n\n # Add the overall scores to the metrics\n valid_scores.append(valid_auc)\n train_scores.append(np.mean(train_scores))\n\n # Needed for creating dataframe of validation scores\n fold_names = list(range(n_folds))\n fold_names.append('overall')\n\n # Dataframe of validation scores\n metrics = pd.DataFrame({'fold': fold_names,\n 'train': train_scores,\n 'valid': valid_scores})\n\n return submission, feature_importances, metrics",
"def retrain_sub_model(self):\r\n \r\n self.sub_model = self.load_weights_to_sub_model()\r\n X = np.array(self.conv4_characters_list)\r\n X = np.reshape(X, (X.shape[0]*X.shape[1], X.shape[2]))\r\n y = np.repeat(np.arange(1283), 9)\r\n \r\n opt = optimizers.Adam(lr=0.001)\r\n self.sub_model.compile(optimizer=opt,loss='sparse_categorical_crossentropy',metrics=['accuracy'])\r\n print(\"***Start to creat new decision model***\")\r\n self.sub_model.fit(X, y, epochs=20)\r\n print(\"***Finish***\")",
"def train_model(args, train_exs: List[SentimentExample]) -> SentimentClassifier:\n # Initialize feature extractor\n nltk.download('stopwords')\n stop_words = set(stopwords.words('english'))\n\n if args.model == \"TRIVIAL\":\n feat_extractor = None\n elif args.feats == \"UNIGRAM\":\n feat_extractor = UnigramFeatureExtractor(Indexer(), train_exs, stop_words)\n elif args.feats == \"BIGRAM\":\n # Add additional preprocessing code here\n feat_extractor = BigramFeatureExtractor(Indexer(), train_exs, stop_words)\n elif args.feats == \"BETTER\":\n # Add additional preprocessing code here\n feat_extractor = BetterFeatureExtractor(Indexer(), train_exs, stop_words)\n else:\n raise Exception(\"Pass in UNIGRAM, BIGRAM, or BETTER to run the appropriate system\")\n\n # Train the model\n if args.model == \"TRIVIAL\":\n model = TrivialSentimentClassifier()\n elif args.model == \"PERCEPTRON\":\n model = train_perceptron(train_exs, feat_extractor)\n elif args.model == \"LR\":\n model = train_logistic_regression(train_exs, feat_extractor)\n else:\n raise Exception(\"Pass in TRIVIAL, PERCEPTRON, or LR to run the appropriate system\")\n return model",
"def __init__(self):\n self.bigramCounts = collections.defaultdict(lambda : 0)\n self.trigramCounts = collections.defaultdict(lambda : 0)\n self.unigramCounts = collections.defaultdict(lambda : 1)\n self.continuationCounts = collections.defaultdict(lambda: 0)\n self.followingCounts = collections.defaultdict(lambda: 0)\n self.total = 1\n self.totalBigramCounts = 0\n print \"Training Language Model...\"\n self.train(brown.sents())\n print \"--Training Complete--\"",
"def Subtask4_pre_train_5():\n with open(PATH + 'pre_train_4_Subtask4.txt', encoding='utf-8') as fi:\n evi = eval(fi.read())\n\n train_data = np.load(PATH + 'pre_train_2_Subtask4.npy', allow_pickle=True).item()\n model = word2vec.KeyedVectors.load_word2vec_format(PATH + \"data/GoogleNews-vectors-negative300.bin\", binary=True)\n\n with open(PATH + 'pre_train_3_Subtask4.txt', encoding='utf-8') as f:\n document = eval(f.read())\n\n with open(PATH + 'traindata_Subtask4.txt', 'w') as fp:\n for data in train_data.items():\n claim = data[0]\n claim = re.sub(\"[-,.。:_=+*&^%$#@!?()<>/`';|]\", \"\", claim)\n claim = claim.split(' ')\n claim = list(filter(lambda x: x in model.vocab, claim))\n Vi = []\n for i in range(len(claim)):\n Vi.append(model[claim[i]])\n\n V = np.zeros(len(Vi[0]))\n for i in range(len(claim)):\n for j in range(len(Vi[0])):\n V[j] = V[j] + Vi[i][j]\n\n rms = 0\n for i in range(len(Vi[0])):\n rms += V[i] * V[i]\n rms = np.sqrt(rms / len(Vi[0]))\n\n for i in range(len(Vi[0])):\n V[i] = V[i] / rms\n V = V.astype(str).tolist()\n\n for doc in data[1]:\n lines = document[doc].split('\\n')\n for k in range(len(lines)):\n label = [data[0], doc, k]\n line = document[doc].split('\\n')[k]\n if line != str(k) + '\\t':\n line = line.replace(str(k) + '\\t', '')\n line = line.split('\\t')[0]\n line = re.sub(\"[-,.。:_=+*&^%$#@!?()<>/`';|]\", \"\", line)\n line = line.split(' ')\n line = list(filter(lambda x: x in model.vocab, line))\n if len(line) != 0:\n Vi = []\n for i in range(len(line)):\n Vi.append(model[line[i]])\n\n V1 = np.zeros(len(Vi[0]))\n for i in range(len(line)):\n for j in range(len(Vi[0])):\n V1[j] = V1[j] + Vi[i][j]\n\n rms = 0\n for i in range(len(Vi[0])):\n rms += V1[i] * V1[i]\n rms = np.sqrt(rms / len(Vi[0]))\n\n for i in range(len(Vi[0])):\n V1[i] = V1[i] / rms\n V1 = V1.astype(str).tolist()\n\n if label in evi:\n fp.write(' '.join(V) + ' ' + ' '.join(V1) + ' 1' + '\\n')\n else:\n fp.write(' '.join(V) + ' ' + ' '.join(V1) + ' 0' + '\\n')",
"def createBaselineClassifier(self, bigram=False):\n \n tweets, labels = self.read_corpus()\n ten_folds = self.get_n_folds(tweets, labels)\n\n if(bigram):\n print(\"Baseline: tf-idf bigram SVM\")\n self.write_label_to_csv(\"Baseline: tf-idf bigram SVM\")\n bow_transformer = CountVectorizer(analyzer=\"word\", min_df=1, lowercase=False, ngram_range=(1,2))\n else:\n print(\"Baseline: tf-idf unigram SVM\")\n self.write_label_to_csv(\"Baseline: tf-idf unigram SVM\")\n bow_transformer = CountVectorizer(analyzer=\"word\", min_df=2, lowercase=False)\n \n tfidf_transformer = TfidfTransformer()\n document_term_matrix = bow_transformer.fit(tweets)\n vocab = bow_transformer.vocabulary_\n if(bigram):\n print(str(len(vocab)) + \" Uni- and Bigrams found\")\n else:\n print(str(len(vocab)) + \" Unigrams found\")\n\n scores = []\n # Cross validation: \n for i in range(len(ten_folds)):\n \n test_fold = ten_folds[i]\n train_folds = [fold for x, fold in enumerate(ten_folds) if x != i]\n X_test, Y_test = test_fold[0], test_fold[1]\n X_train, Y_train = [], []\n for fold in range(len(train_folds)):\n X_train.extend(train_folds[fold][0])\n Y_train.extend(train_folds[fold][1])\n\n assert(len(X_test) == len(Y_test))\n assert(len(X_train) == len(Y_train))\n \n if(bigram):\n vectorizer = CountVectorizer(analyzer=\"word\", min_df=1, vocabulary=vocab, lowercase=False, ngram_range=(1,2))\n else:\n vectorizer = CountVectorizer(analyzer=\"word\", min_df=2, lowercase=False, vocabulary=vocab)\n \n document_term_matrix_tr = vectorizer.fit_transform(X_train).toarray()\n document_term_matrix_te = vectorizer.fit_transform(X_test).toarray()\n\n X_train_tfidf = tfidf_transformer.fit_transform(document_term_matrix_tr)\n X_test_tfidf = tfidf_transformer.fit_transform(document_term_matrix_te)\n\n le = preprocessing.LabelEncoder()\n Y_train_enc = le.fit_transform(Y_train)\n Y_test_enc = le.fit_transform(Y_test)\n \n clf = svm.SVC(kernel='linear', C=1.0,random_state=1).fit(X_train_tfidf, Y_train_enc)\n \n predicted = clf.predict(X_test_tfidf)\n # scores.append(clf.score(X_test_tfidf, Y_test_enc))\n # print(metrics.classification_report(Y_test_enc, predicted)) \n score = metrics.f1_score(Y_test_enc, predicted, average='weighted')\n print(\"Cross Validation #{0} --> avg. weighted F1-Score: {1}\".format(i+1, score))\n self.write_output_to_csv(i, X_test, Y_test, X_train, Y_train, score)\n scores.append(score) \n\n scores = np.array(scores)\n self.write_score_to_csv(scores)\n print(\"Total Accuracy: %0.2f (+/- %0.2f)\" % (scores.mean(), scores.std() * 2))",
"def build_model(self, text, n = 3):\n \n try:\n self.lm.build_model(text,n)\n except:\n raise\n \n self.vocab = Counter(words(text))\n\n return self.lm",
"def construct_NLP_model(self, df=None):\n import review_processing as rp\n # get words\n if df is not None:\n nitems = df.shape[0]\n col_names = df.columns.values\n if self.review_col_name not in col_names or \\\n self.sentiment_col_name not in col_names:\n sys.exit('construct_NL_model: The name {0}/{1} cannot be found'.\n format(self.review_col_name, self.sentiment_col_name))\n review_list = df[self.review_col_name].values.tolist()\n meaningful_words = map(self.review_to_meaningful_words,\n review_list)\n # Get training sentiment values\n self.sentiment = df[self.sentiment_col_name].values\n\n else:\n if self.training_file_name is None:\n sys.exit('construct_NLP_model: traning file name does not '\n 'exist')\n else:\n suffix = os.path.splitext(self.training_file_name)[1][1:]\n if suffix == 'csv':\n df = pd.read_csv(self.training_file_name)\n if self.review_col_name not in col_names or \\\n self.sentiment_col_name not in col_names::\n sys.exit('construct_NL_model: The name {0}/{1} cannot '\n ' be found'.format(self.review_col_name,\n self.sentiment_col_name))\n nitems = df.shape[0]\n review_list = df[review_col_name].values.tolist()\n meaningful_words = map(self.review_to_meaningful_words,\n review_list)\n elif suffix == 'json':\n data_dict_list = rp.load_data(self.training_file_name)\n if self.review_col_name not in data_dict_list.keys():\n sys.exit('construct_NL_model: The name {0} cannot be '\n 'found'.format(review_col_name))\n review_list = map(lambda x: x[review_col_name],\n data_dict_list)\n meaningful_words = map(self.review_to_meaningful_words,\n review_list)\n else:\n sys.exit('construct_NLP_model: file type not supported '\n 'yet!')\n\n # Training process of Bag of Worlds\n if self.NLP_model == 'BagofWords':\n print('construct_NLP_model: Creating bag of words...')\n self.vectorizer = CountVectorizer(analyzer='word',\n tokenizer=None,\n preprocessor=None,\n stop_words=None,\n max_features=self.maxfeature)\n self.train_data_features = vectorizer.fit_transform(\n meaningful_words)\n self.train_data_features = train_data_features.toarray()\n\n # vocab = vectorizer.get_feature_names()\n # dist = np.sum(train_data_features, axis=0)\n # for tag, count in zip(vocab, dist):\n # print(count, tag)\n\n else:\n sys.exit('construct_NLP_model: NLP_model type not supported yet!')",
"def train(self, x_train, y_train):\n\n # convert input to format for classifier\n list_of_embeddings = list(x_train[self.embeddings_col])\n x_train = np.array([[float(i) for i in embedding.strip('[]').split()] for embedding in list_of_embeddings])\n\n # discard fold ID column from labels\n review_groups = [col for col in y_train.columns if not col=='k']\n\n for review_group in tqdm(review_groups, desc='Train Review Groups'):\n\n # pull label column\n labels = y_train[review_group]\n\n # logistic classifier\n classifier = SGDClassifier(loss=\"log\", alpha=self.alpha,\n l1_ratio = self.l1_ratio, penalty=\"elasticnet\").fit(x_train, labels)\n\n # save the model in dictionary of models\n self.models[review_group] = classifier",
"def load_preprocessed(self):\n with open(self.words_vocab_file, 'rb') as f:\n self.word_to_id, self.unk_word_list = pickle.load(f)\n self.word_vocab_size = len(self.word_to_id)\n\n if self.unit != \"word\":\n with open(self.sub_vocab_file, 'rb') as f:\n if self.unit == \"char\":\n self.max_word_len = self.get_max_word_length(self.word_to_id) + 2\n self.char_to_id, self.unk_char_list, self.max_word_len = pickle.load(f)\n self.subword_vocab_size = len(self.char_to_id)\n elif self.unit == \"char-ngram\":\n self.ngram_to_id, self.unk_char_list, self.unk_ngram_list, \\\n self.max_ngram_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.ngram_to_id)\n elif self.unit == \"morpheme\":\n self.morpheme_to_id, self.unk_char_list, self.unk_morph_list, \\\n self.max_morph_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.morpheme_to_id)\n elif self.unit == \"oracle\":\n self.morpheme_to_id, self.max_morph_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.morpheme_to_id)\n else:\n sys.exit(\"Unknown unit\")",
"def __init__(self, model, src_vocab, tgt_vocab):\n self.max_length = 120\n if torch.cuda.is_available():\n self.model = model.cuda()\n else:\n self.model = model.cpu()\n self.model.eval()\n self.src_vocab = src_vocab\n self.tgt_vocab = tgt_vocab",
"def preprocess(self):\n self.word_to_id, self.unk_word_list = self.build_vocab(mode=\"word\")\n self.word_vocab_size = len(self.word_to_id)\n self.max_word_len = self.get_max_word_length(self.word_to_id)\n # Do not write the same file again\n if not os.path.exists(self.words_vocab_file):\n with open(self.words_vocab_file, 'wb') as f:\n pickle.dump((self.word_to_id, self.unk_word_list), f)\n if self.unit != \"word\":\n self.preprocess_sub_units()",
"def train_model():\n print('Loading the dataset...')\n dataset = pd.read_csv('app/Sentiment_Reviews.csv',index_col=0)\n X = dataset[['Reviews']]\n y = dataset[['Sentiment']]\n le = preprocessing.LabelEncoder()\n le.fit(y)\n y = (le.transform(y))\n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=38)\n\n print('Training the model...')\n text_clf_svm = Pipeline([('vect', CountVectorizer()), ('tfidf', TfidfTransformer(use_idf=False)), ('clf-svm', SGDClassifier(loss='modified_huber', penalty='l2', alpha=0.001, random_state=42, max_iter=20))])\n text_clf_svm = text_clf_svm.fit(X_train['Reviews'], y_train)\n\n print('Storing model to redis...')\n pickled_model = pickle.dumps(text_clf_svm)\n try:\n redis_client.set('ml_model', pickled_model)\n except RedisError as e:\n print('Storing the model was not successful and threw an error.')\n print(e)",
"def basic_model_init(model_args, task_infos, tokenizer):\n config = AutoConfig.from_pretrained(\n model_args.model_name_or_path,\n num_labels=task_infos.num_labels,\n cache_dir=model_args.model_cache_dir,\n id2label=task_infos.id2label,\n label2id=task_infos.label2id,\n )\n model_cls = getattr(mod, model_args.architectures,\n AutoModelForSequenceClassification)\n model = model_cls.from_pretrained(\n model_args.model_name_or_path,\n config=config,\n cache_dir=model_args.model_cache_dir,\n )\n if model.config.vocab_size < len(tokenizer):\n print(\"resize...\")\n model.resize_token_embeddings(len(tokenizer))\n return model",
"def main():\r\n preprocessor = DATA_PREPROCESSOR('shakespeare-corpus.txt')\r\n corpus = preprocessor.preprocess_data()\r\n plot(corpus)\r\n data, unique_vocab, word_to_idx = create_context(corpus)\r\n\r\n #train model- changed global variable if needed\r\n model=CBOW(len(unique_vocab), EMBEDDING_DIM, CONTEXT_SIZE)\r\n if USE_ADAM:\r\n print('Using adam as optimizer')\r\n optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\r\n else:\r\n print('Using SGD as optimizer')\r\n optimizer = torch.optim.SGD(model.parameters(), lr=0.001)\r\n\r\n checkpoint_file ='checkpoint.pth'\r\n checkpoint_available= os.path.exists(checkpoint_file)\r\n if checkpoint_available:\r\n model, optimizer, current_epoch = reset_model_to_checkpoint(model, optimizer, checkpoint_file)\r\n else:\r\n print('no checkpoint found. initializing new model..\\n')\r\n current_epoch=0 \r\n\r\n executor = MODEL_EXECUTOR(model)\r\n if RESUME_TRAINING or not checkpoint_available:\r\n print('resuming training...\\n')\r\n import time\r\n start_time = time.time()\r\n cbow = executor.train(optimizer, data, unique_vocab, word_to_idx, current_epoch, checkpoint_file)\r\n print(\"--- %s seconds ---\" % (time.time() - start_time))\r\n else:\r\n print('pre-trained model loaded. no further training...\\n')\r\n\r\n # get two words similarity\r\n executor.test(unique_vocab,word_to_idx)\r\n\r\n show_closest_words(cbow, word_to_idx,unique_vocab)",
"def load_model():\n global clf\n mdl = joblib.load('model/chitchat.model')\n vec = joblib.load('model/tfidf.vectorizer')\n clf = ChitChatClassifier(mdl, vec)",
"def model_final(input_shape, output_sequence_length, english_vocab_size, french_vocab_size):\n # TODO: Implement\n\n # Hyperparameters\n learning_rate = 0.003\n \n # Build the layers \n model = Sequential()\n # Embedding\n model.add(Embedding(english_vocab_size, 128, input_length=input_shape[1],\n input_shape=input_shape[1:]))\n # Encoder\n model.add(Bidirectional(GRU(128)))\n model.add(RepeatVector(output_sequence_length))\n # Decoder\n model.add(Bidirectional(GRU(128, return_sequences=True)))\n model.add(TimeDistributed(Dense(512, activation='relu')))\n model.add(Dropout(0.5))\n model.add(TimeDistributed(Dense(french_vocab_size, activation='softmax')))\n model.compile(loss=sparse_categorical_crossentropy,\n optimizer=Adam(learning_rate),\n metrics=['accuracy'])\n return model",
"def load_vocab():\n # vocab loaded internally at google\n unused = r.sp_model\n del unused\n return r",
"def load_vocab(self):\n\n if self.vocabulary_path: \n # For now, the file format is derived from the file extension.\n if self.vocabulary_path.endswith('csv'):\n self.logger.info(\"Filter spymaster vocabulary by csv-file: {}\".format(self.vocabulary_path))\n with open(self.vocabulary_path, 'r') as fin:\n reader = csv.reader(fin)\n header = next(reader)\n for row in reader:\n word = row[1].lower()\n self.update_vocab(word) \n elif self.vocabulary_path.endswith('txt'):\n self.logger.info(\"Filter spymaster vocabulary by txt-file: {}\".format(self.vocabulary_path))\n with open(self.vocabulary_path, 'r') as fin:\n for line in fin:\n word = line.strip()\n self.update_vocab(word)\n else:\n raise ValueError(\"Unknown file format for filter spymaster vocabulary.\") \n else:\n self.logger.info(\"Load spymaster vocabulary from gensim.models.KeyedVectors.\")\n self.vocab = self.model.vocab\n self.vocab_size = len(self.vocab)\n\n self.logger.info(\"Spymaster vocabulary size is {}\".format(self.vocab_size))",
"def train_vectorizer (train_texts):\n\n tokenizer = text.Tokenizer(num_words=TOP_K)\n tokenizer.fit_on_texts(train_texts)\n\n train_texts = tokenizer.texts_to_sequences(train_texts)\n # get and set max sequence length\n max_length = len(max(train_texts, key=len))\n if max_length > MAX_SEQUENCE_LENGTH:\n max_length = MAX_SEQUENCE_LENGTH\n\n # saving\n with open('tokenizer.pickle', 'wb') as handle:\n pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL)\n\n return tokenizer, tokenizer.word_index, max_length",
"def lm_train(data_dir, language, fn_LM):\r\n\r\n # TODO: Implement Function\r\n\r\n language_model, unigram, bigram = {}, {}, {}\r\n CKP = \"WEAREDELETINGEND\"\r\n pre_w = CKP\r\n for root, dirs, files in os.walk(data_dir, topdown=False):\r\n for name in files:\r\n if name.endswith(language):\r\n #print(\"reading \", name)\r\n filepath = os.path.join(data_dir, name)\r\n readingfile = open(filepath, \"r\")\r\n for line in readingfile:\r\n processed = preprocess(line, language)\r\n if len(processed) != 0:\r\n tokenList = processed.split()\r\n for w in tokenList:\r\n # ======================\r\n # for unigram structure\r\n # ======================\r\n # not exist yet, initialize it at count 1\r\n if w not in unigram.keys():\r\n unigram[w] = 1\r\n else:\r\n unigram[w] += 1\r\n\r\n # ======================\r\n # for bigram structure\r\n # ======================\r\n if pre_w not in bigram.keys():\r\n bigram[pre_w] = {} # building the first words level\r\n bigram[pre_w][w] = 1\r\n else:\r\n if w not in bigram[pre_w].keys():\r\n bigram[pre_w][w] = 1\r\n else:\r\n bigram[pre_w][w] += 1\r\n pre_w = w\r\n pre_w = CKP\r\n\r\n\r\n language_model[\"uni\"] = unigram\r\n bigram.pop(CKP)\r\n bigram.pop(\"SENTEND\")\r\n language_model[\"bi\"] = bigram\r\n\r\n #Save Model\r\n with open(fn_LM+'.pickle', 'wb') as handle:\r\n pickle.dump(language_model, handle, protocol=pickle.HIGHEST_PROTOCOL)\r\n\r\n return language_model",
"def train(self, corpus):\n for sentence in corpus.corpus:\n cleanSentence = sentence.cleanSentence()\n for datum in cleanSentence.data:\n token = datum.word\n self.unigramCounts[token] = self.unigramCounts[token] + 1\n self.total += 1\n\n i = 0\n while i < len(sentence.data) - 1:\n token = str(cleanSentence.get(i))\n self.followingWords[token].add(str(cleanSentence.get(i+1)))\n i += 1\n\n i = 1\n while i < len(sentence.data):\n bigram = str(cleanSentence.get(i-1)) + \" \" + str(cleanSentence.get(i))\n self.bigramCounts[bigram] = self.bigramCounts[bigram] + 1\n\n self.precedingWords[str(cleanSentence.get(i))].add(str(cleanSentence.get(i-1)))\n i += 1\n self.precedingWordsTotal = sum(map(lambda x: len(x), self.precedingWords.values()))\n\n i = 2\n while i < len(sentence.data):\n trigram = str(cleanSentence.get(i-2)) + \" \" + str(cleanSentence.get(i-1)) + \" \" + str(cleanSentence.get(i))\n self.trigramCounts[trigram] = self.trigramCounts[trigram] + 1\n i += 1\n\n #print('precedingWords')\n #print(self.precedingWords)\n #print('followingWords')\n #print(self.followingWords)\n #print('unigrams')\n #print(self.unigramCounts)\n #print('bigrams')\n #print(self.bigramCounts)\n\n #self.discount(self.trigramCounts)\n #self.discount(self.bigramCounts)\n #self.discount(self.unigramCounts)",
"def train(self, documents):\n ###DONE\n\n #entire vocab in document set D\n vocab_sod = set()\n vocab_pop = set()\n \n #Calcuates prior probabilities\n priorSOD = 0 #how many docs are spam\n priorPOP = 0 #how many docs are ham\n \n #Cacluates Tct\n term_freq_sod = {} #{term:occur, term:occur}\n term_freq_pop = {}\n \n #Tct'\n Tct_sod = 0 #Tct' = sum of (every term occurence in class c + 1)\n Tct_pop = 0\n \n for doc in documents: \n if 'sod' in doc.label:\n priorSOD += 1\n for token in doc.tokens:\n Tct_sod += 1\n if token in term_freq_sod.keys():\n term_freq_sod[token] = term_freq_sod[token] + 1\n else:\n term_freq_sod[token] = 1\n vocab_sod.add(token) \n else:\n priorPOP += 1\n for token in doc.tokens:\n Tct_pop += 1\n if token in term_freq_pop.keys():\n term_freq_pop[token] = term_freq_pop[token] + 1\n else:\n term_freq_pop[token] = 1\n vocab_pop.add(token)\n \n \n #endfor\n # | is for set join\n self.vocab = vocab_sod | vocab_pop #gets rid of duplicate words (those in both 'ham' and 'spam') \n \n #Tct Primes\n #tct' = term freq of all terms in class c + 1*(total terms)\n Tct_sod = Tct_sod + len(self.vocab) \n Tct_pop = Tct_pop + len(self.vocab) \n \n \n print(\"PriorSod: \" + str(priorSOD))\n print(\"PriorPop: \" + str(priorPOP))\n print(\"LEN Docum: \" + str(len(documents)))\n \n self.priorSOD = priorSOD / len(documents)\n self.priorPOP = priorPOP / len(documents)\n \n for term in self.vocab:\n if term in term_freq_pop.keys():\n self.cond_prob_pop[term] = (term_freq_pop[term] + 1) / Tct_pop\n else:\n self.cond_prob_pop[term] = 1 / Tct_pop\n \n if term in term_freq_sod.keys():\n self.cond_prob_sod[term] = (term_freq_sod[term] + 1) / Tct_sod\n else:\n self.cond_prob_sod[term] = 1 / Tct_sod\n \n \n pass",
"def get_vocab(self):\n\n\t\tself.parse_transcript() \n\t\tself.purge_words()\n\t\tself.analyze_words()\n\t\tself.sort_word_analysis()"
] | [
"0.7002918",
"0.69951147",
"0.68627745",
"0.67508525",
"0.66912574",
"0.66359586",
"0.6607676",
"0.6582862",
"0.65730536",
"0.65690607",
"0.65606356",
"0.65052295",
"0.64981496",
"0.6493124",
"0.64924264",
"0.648903",
"0.6479486",
"0.64262235",
"0.6392545",
"0.6390712",
"0.63851887",
"0.6384979",
"0.63699853",
"0.63583654",
"0.6349198",
"0.6332308",
"0.6320147",
"0.63031244",
"0.6297802",
"0.62972933",
"0.629229",
"0.62755704",
"0.6269444",
"0.62584674",
"0.6258413",
"0.6257802",
"0.6249101",
"0.62481195",
"0.623757",
"0.6236912",
"0.6234814",
"0.6219097",
"0.6217433",
"0.62134457",
"0.62129843",
"0.6208663",
"0.6197741",
"0.6180845",
"0.6179771",
"0.61709297",
"0.61620367",
"0.6159182",
"0.6153241",
"0.61518085",
"0.61442137",
"0.61439466",
"0.6138246",
"0.6135106",
"0.613389",
"0.6126375",
"0.61235297",
"0.61233103",
"0.6105804",
"0.61010426",
"0.60979396",
"0.60865",
"0.6082665",
"0.6074734",
"0.6071782",
"0.6069176",
"0.60676277",
"0.6064513",
"0.6064218",
"0.60630906",
"0.6061387",
"0.6058677",
"0.60540724",
"0.6052601",
"0.60497665",
"0.60482574",
"0.6045776",
"0.60310113",
"0.6029232",
"0.60288465",
"0.60269",
"0.60238016",
"0.60223544",
"0.60192096",
"0.6010658",
"0.6009259",
"0.6006326",
"0.6005048",
"0.6003761",
"0.6001121",
"0.5997654",
"0.5995868",
"0.59933",
"0.59929824",
"0.59872687",
"0.5986948",
"0.59856266"
] | 0.0 | -1 |
Model trained with negative sampling. | def __init__(self, vocab_size: int, embedding_dim: int, dropout: float = 0.2, pad_idx: int = Vocabulary.pad_idx):
super(NegativeSamplingModel, self).__init__()
self.in_embedding = nn.Embedding(vocab_size, embedding_dim, padding_idx=pad_idx)
self.out_embedding = nn.Embedding(vocab_size, embedding_dim, padding_idx=pad_idx)
self.embed_dropout = nn.Dropout(dropout)
self.rnn = nn.LSTM(embedding_dim, embedding_dim)
self.embedding_dim = embedding_dim
self.pad_idx = pad_idx
initrange = 0.5 / embedding_dim
self.in_embedding.weight.data.uniform_(-initrange, initrange)
self.in_embedding.weight.data[pad_idx].zero_()
self.out_embedding.weight.data.uniform_(-initrange, initrange)
self.out_embedding.weight.data[pad_idx].zero_() | {
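
    # A minimal usage sketch, assuming the usual skip-gram negative-sampling
    # (SGNS) wiring for the paired in/out embedding tables above. The function
    # name `negative_sampling_loss` and the batch shapes are illustrative
    # assumptions, not the author's actual forward pass.
    # import torch; import torch.nn.functional as F
    @staticmethod
    def negative_sampling_loss(model, center_ids, pos_ids, neg_ids):
        # center_ids: (batch,), pos_ids: (batch,), neg_ids: (batch, k)
        center = model.in_embedding(center_ids)                       # (batch, dim)
        pos = model.out_embedding(pos_ids)                            # (batch, dim)
        neg = model.out_embedding(neg_ids)                            # (batch, k, dim)
        pos_score = torch.sum(center * pos, dim=-1)                   # (batch,)
        neg_score = torch.bmm(neg, center.unsqueeze(-1)).squeeze(-1)  # (batch, k)
        # maximize log sigma(pos_score) and log sigma(-neg_score)
        return -(F.logsigmoid(pos_score) + F.logsigmoid(-neg_score).sum(dim=-1)).mean()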
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def neg_sampling_transform(data):\n train_neg_edge_index = negative_sampling(\n edge_index=data.train_pos_edge_index, num_nodes=data.num_nodes,\n num_neg_samples=data.train_pos_edge_index.size(1))\n data.train_edge_index = torch.cat(\n [data.train_pos_edge_index, train_neg_edge_index], dim=-1)\n data.train_edge_label = create_link_label(data.train_pos_edge_index,\n train_neg_edge_index)\n\n return data",
"def nonlearning():\n\taT.featureAndTrain(['../../AudioData/chunked_data_sorted/pos', '../../AudioData/chunked_data_sorted/neg'], \n\t\t\t\t\t\t1.0, 1.0, aT.shortTermWindow, aT.shortTermStep, \n \"svm\", \"emotion_classifier\", True)",
"def negative_sampling(self):\n \n self.train_arr = []\n sample_list = np.random.choice(list(range(self.item_count)), size = 10 * len(self.interactions) * self.num_ns)\n \n sample_idx = 0\n for user, pos_item, _ in self.interactions:\n ns_count = 0\n \n while True:\n neg_item = sample_list[sample_idx]\n if not is_visited(self.rating_mat, user, neg_item):\n self.train_arr.append((user, pos_item, neg_item))\n sample_idx += 1\n ns_count += 1\n if ns_count == self.num_ns:\n break\n \n sample_idx += 1",
"def predict_only(self):",
"def global_uniform_negative_sampling(\n self, num_samples, exclude_self_loops=True, replace=False, etype=None\n ):\n raise NotImplementedError(\n \"global_uniform_negative_sampling not implemented yet\"\n )",
"def init_negative_sampler(self, unigram_power=3/4, strategy=\"word2vec\"):\n if strategy == \"word2vec\":\n counts = self.target['dst'].value_counts(normalize=True)\n freq = counts.values ** unigram_power\n self.freq = freq / sum(freq)\n self.dst_idxs = counts.index\n self.dst_neg_sampling = lambda size: np.random.choice(self.dst_idxs, size, replace=True, p=self.freq)\n elif strategy == \"uniform\":\n self.dst_neg_sampling = lambda size: np.random.choice(self.unique_dst, size, replace=True)",
"def generate_negative_samples(self, data, sampled_data, zeros=[], validation=False):\n negative_sampled_data = []\n negative_sampled_indices = []\n for sample in sampled_data:\n i = data['pos'].index(sample) ## index of a particular move in a demo\n all_num = 0\n for which, num in enumerate(data['leng_pos']):\n all_num += num\n if all_num > i:\n which_demo = which ## index of a demo the move with index i comes from\n break\n\n sum_neg_lengths = sum(data['leng_neg'][:which_demo])\n\n key = sum_neg_lengths-1 \n value = sum_neg_lengths + data['leng_neg'][which_demo]\n demo_negative_data = data['neg'][key : value]\n state, action = sample\n for demo_state, demo_action in demo_negative_data:\n if demo_state == state:\n negative_sampled_data.extend([(demo_state, demo_action)])\n demo_index = data['neg'].index((demo_state, demo_action))\n negative_sampled_indices.append(demo_index)\n\n if not validation:\n num_pos = sum(self.pipeline_y == 1)\n num_neg = len(negative_sampled_data)\n pos_sample = self.pipeline_X[:num_pos, :]\n neg_sample = self.pipeline_X[num_pos + negative_sampled_indices, :]\n y_vector = [1] * num_pos + [0] * num_neg\n ######################### Mouselab ad-hc #########################\n ########################## Removing 0's ##########################\n non_zero = [self.pipeline_X[i, :] for i in range(num_pos)\n if i not in zeros]\n pos_sample = vstack(non_zero) if non_zero != [] else self.pipeline_X[0,:]\n num_pos = pos_sample.shape[0]\n y_vector = [1] * num_pos + [0] * num_neg\n ##################################################################\n\n self.pipeline_X = vstack((pos_sample, neg_sample))\n self.pipeline_y = np.array(y_vector, dtype='uint8')\n \n return negative_sampled_data",
"def _sample_seed(self):\n raise Exception(\" not implemented in base model\")",
"def sample_model(model, x, y, params_init, model_loss='multi_class_linear_output' ,num_samples=10, num_steps_per_sample=10, step_size=0.1, burn=0, inv_mass=None, jitter=None, normalizing_const=1., softabs_const=None, explicit_binding_const=100, fixed_point_threshold=1e-5, fixed_point_max_iterations=1000, jitter_max_tries=10, sampler=Sampler.HMC, integrator=Integrator.IMPLICIT, metric=Metric.HESSIAN, debug=False, tau_out=1.,tau_list=None, store_on_GPU = True, desired_accept_rate=0.8, verbose = False):\n\n device = params_init.device\n params_shape_list = []\n params_flattened_list = []\n build_tau = False\n if tau_list is None:\n tau_list = []\n build_tau = True\n for weights in model.parameters():\n params_shape_list.append(weights.shape)\n params_flattened_list.append(weights.nelement())\n if build_tau:\n tau_list.append(torch.tensor(1.))\n\n log_prob_func = define_model_log_prob(model, model_loss, x, y, params_flattened_list, params_shape_list, tau_list, tau_out, normalizing_const=normalizing_const, device = device)\n\n if torch.cuda.is_available():\n torch.cuda.empty_cache()\n\n return sample(log_prob_func, params_init, num_samples=num_samples, num_steps_per_sample=num_steps_per_sample, step_size=step_size, burn=burn, jitter=jitter, inv_mass=inv_mass, normalizing_const=normalizing_const, softabs_const=softabs_const, explicit_binding_const=explicit_binding_const, fixed_point_threshold=fixed_point_threshold, fixed_point_max_iterations=fixed_point_max_iterations, jitter_max_tries=jitter_max_tries, sampler=sampler, integrator=integrator, metric=metric, debug=debug, desired_accept_rate=desired_accept_rate, store_on_GPU = store_on_GPU, verbose = verbose)",
"def test_no_duplicates_and_positives_in_negative_sample(self):\n model = PoincareModel(self.data_large, negative=3)\n positive_nodes = model.node_relations[0] # Positive nodes for node 0\n num_samples = 100 # Repeat experiment multiple times\n for i in range(num_samples):\n negatives = model._sample_negatives(0)\n self.assertFalse(positive_nodes & set(negatives))\n self.assertEqual(len(negatives), len(set(negatives)))",
"def no_overfitting(self):\n\n # Instance with minimun length should be the maximum length\n train_len = []\n [train_len.append(st['Nevents']) for st in self.stats]\n train_len = np.array(train_len)\n max_len = train_len[train_len != 0].min()\n\n # CROPS FEATURE SAMPLES\n onpower_train = pd.DataFrame()\n offpower_train = pd.DataFrame()\n duration_train = pd.DataFrame()\n start = 0\n end = 0\n for ind in np.arange(len(self.stats)):\n if self.stats[ind]['Nevents'] != 0:\n if ind == 0:\n start = 0\n else:\n start = end\n end += self.stats[ind]['Nevents']\n\n aux = self.onpower_train[start:end]\n aux = aux[:max_len]\n onpower_train = pd.concat([onpower_train, aux])\n\n aux = self.offpower_train[start:end]\n aux = aux[:max_len]\n offpower_train = pd.concat([offpower_train, aux])\n\n aux = self.duration_train[start:end]\n aux = aux[:max_len]\n duration_train = pd.concat([duration_train, aux])\n\n # udating stats:\n self.stats[ind]['Nevents'] = max_len\n\n self.onpower_train = onpower_train\n self.offpower_train = offpower_train\n self.duration_train = duration_train\n\n # RE-TRAINS FEATURES:\n self.__retrain(self.onpower, self.onpower_train)\n self.__retrain(self.offpower, self.offpower_train)\n self.__retrain(self.duration, self.duration_train)",
"def baseline_model(optimizer='rmsprop', init='glorot_uniform', dropout=0.2):\n model = keras.models.Sequential()\n model.add(Dropout(dropout, input_shape=(12,)))\n model.add(Dense(12, input_dim=12, kernel_initializer=init, activation='relu'))\n model.add(Dropout(dropout))\n model.add(Dense(1, kernel_initializer=init))\n # Compile model\n model.compile(loss='mean_squared_error', optimizer=optimizer)\n return model",
"def __neg__(self):\n return TensorWithIndices(-self._tensor, \n self._con + '_' + self._cov)",
"def backward_sample(self, target):\n return self.flow.bijector.inverse(target)",
"def negative_sampling(word_input, target, unigrams_table, neg_examples_size=5):\n negative_examples = []\n while len(negative_examples) is not neg_examples_size:\n neg_sample = np.random.choice(unigrams_table)\n # Make sure that the negative example is not the same as the training or as the target.\n # This will block if there only is one value within the unigram table\n if (neg_sample != word_input) and (neg_sample != target):negative_examples.append(neg_sample)\n else:pass\n return negative_examples",
"def test_error_if_negative_more_than_population(self):\n model = PoincareModel(self.data, negative=5)\n with self.assertRaises(ValueError):\n model.train(epochs=1)",
"def turn_off_learning(self):\n self.epsilon = 0\n self.alpha = 0",
"def __call__(self, target_labels: List[Tensor], fg_probs: Tensor):\n return super(NegativeSampler, self).__call__(target_labels)",
"def _train_model(self):\n raise NotImplementedError()",
"def test_non_blocked(self):\n _, model = simple_2model()\n with model:\n for sampler in self.samplers:\n assert isinstance(sampler(blocked=False), CompoundStep)",
"def zero_negative_weights(self):\n for k in range(len(self)):\n self[k] *= 0 if self[k] < 0 else 1\n self.finalized = True\n return self",
"def _untrain(self):\n if self.__clf:\n self.__clf._untrain()",
"def test_non_zero_loss(self):\n # Reset models.\n self.model.load_state_dict(self.initial_model_dict)\n self.actor_model.load_state_dict(self.initial_actor_model_dict)\n\n polybeast.learn(*self.learn_args)\n\n self.assertNotEqual(self.stats[\"total_loss\"], 0.0)\n self.assertNotEqual(self.stats[\"pg_loss\"], 0.0)\n self.assertNotEqual(self.stats[\"baseline_loss\"], 0.0)\n self.assertNotEqual(self.stats[\"entropy_loss\"], 0.0)",
"def train_not_distributed():\n with tf.Graph().as_default() as graph:\n # Prepare the data\n train_data, test_data, embeddings_file = prepare_data()\n\n # Create model\n model = create_model(False)\n\n # Create summaries and SummaryWriter\n (test_loss, test_perplexity, bucket_loss_placeholders,\n bucket_perplexity_placeholders, summary, summary_writer) = create_summary_objects(graph)\n\n with tf.Session() as sess:\n init_model(sess, model)\n after_init(sess, model, embeddings_file)\n\n train(sess, model, train_data, test_data, summary, summary_writer, test_loss,\n test_perplexity, bucket_loss_placeholders, bucket_perplexity_placeholders)",
"def fit(self, X, y):\n torch.manual_seed(1234)\n optimizer = torch.optim.Adam(self.net.parameters(), lr=self.config['step_size'])\n neg_elbo = torch.zeros([self.config['num_epochs'], 1])\n params_store = {}\n for epoch in range(self.config['num_epochs']):\n loss = self.net.neg_elbo(num_batches=1, x=X, y=y.float().unsqueeze(dim=1)) / X.shape[0]\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if hasattr(self.net, 'fixed_point_updates'):\n # for hshoe or regularized hshoe nets\n self.net.fixed_point_updates()\n neg_elbo[epoch] = loss.item()\n if (epoch + 1) % 10 == 0:\n # print ((net.noise_layer.bhat/net.noise_layer.ahat).data.numpy()[0])\n print('Epoch[{}/{}], neg elbo: {:.6f}, noise var: {:.6f}'\n .format(epoch + 1, self.config['num_epochs'], neg_elbo[epoch].item() / X.shape[0],\n self.net.get_noise_var()))\n params_store[epoch] = copy.deepcopy(self.net.state_dict()) # for small nets we can just store all.\n best_model_id = neg_elbo.argmin() # loss_val_store.argmin() #\n self.net.load_state_dict(params_store[best_model_id.item()])\n\n return self",
"def test(self):\n self.training = False",
"def test_nonparam(self):\n Y, T, X, _ = ihdp_surface_B()\n est = AutomatedNonParamDML(model_y=automl_model_reg(),\n model_t=automl_model_clf(),\n model_final=automl_model_sample_weight_reg(), featurizer=None,\n discrete_treatment=True)\n est.fit(Y, T, X=X)\n _ = est.effect(X)",
"def __neg__(self):\n return Factor().__build( VarSet(self.v) , np.negative(self.t) )",
"def eval(self):\n self.train(mode=False)",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def negative_sampling(data: pd.DataFrame,\n vocab: np.ndarray,\n noise_distribution: list,\n neg_sample_size: int\n ) -> pd.DataFrame:\n \n def samples_generator(word: str\n ) -> List[str]:\n while True:\n samples = np.random.choice(\n vocab, neg_sample_size, p=noise_distribution\n )\n if word not in samples:\n return samples\n \n data['negative_samples'] = data['centre_word'].apply(samples_generator)\n return data",
"def model_switch_to_training(self):\n pass",
"def train(self, x={}, **kwargs):\n return 0",
"def train_naive(): # add arguments as needed\n pass",
"def train(self):\n raise IllegalOperationError(\"Cannot train multiplicative model!\")",
"def __init__(self,seed=None,batch_size=60, use_earlyStopping=False, loaded_model=None):\n self.batch_size = batch_size\n self.pruning_rates = PRUNING_PERCENTAGES\n self.early_stopping = use_earlyStopping\n\n if loaded_model == None:\n self.model = keras.Sequential()\n self.model.add(keras.layers.Flatten(input_shape=(28, 28)))\n for layer in LAYERS:\n (units, activation) = LAYERS[layer]\n self.model.add(keras.layers.Dense(units, activation=activation, kernel_initializer=tf.keras.initializers.glorot_normal(seed=None)))\n \n \n self.model.compile(optimizer=OPTIMIZER_FC,\n loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n metrics=['accuracy'])\n\n self.weights_init = self.get_weights()\n \n else:\n self.model = loaded_model\n\n if use_earlyStopping:\n self.es = EarlyStopping(monitor='val_loss', patience=10)",
"def negSamplingCostAndGradient(predicted, target, outputVectors, dataset,\n K=10):\n\n # Sampling of indices is done for you. Do not modify this if you\n # wish to match the autograder and receive points!\n indices = [target]\n indices.extend(getNegativeSamples(target, dataset, K))\n\n ### YOUR CODE HERE\n grad = np.zeros_like(outputVectors)\n gradPred = np.zeros_like(predicted)\n cost = 0.0\n probability = 0.0\n for sample_idx in indices:\n similarity = outputVectors[sample_idx].dot(predicted.T)\n probability = sigmoid(similarity) # squash to 0 ~ 1\n if sample_idx == target: # positive sample\n #p = sigmoid(outputVectors[sample_idx].dot(predicted.T))\n cost += -np.log(sigmoid(similarity))\n else: # negative sample\n #p = sigmoid(-outputVectors[sample_idx].dot(predicted.T))\n cost += -np.log(sigmoid(-similarity)) # deduction from reference 2.\n \n if sample_idx == target:\n grad[sample_idx, :] += (probability - 1) * predicted\n gradPred += (probability - 1) * outputVectors[sample_idx]\n else:\n grad[sample_idx, :] += probability * predicted\n gradPred += probability * outputVectors[sample_idx]\n '''\n V, D = outputVectors.shape\n one_hot_target = np.zeros(V)\n one_hot_target[target] = 1\n cost = 0\n gradPred = np.zeros_like(predicted)\n grad = np.zeros_like(outputVectors)\n \n for idx in indices:\n context_vector = outputVectors[idx] # embedding vector (1, D)\n cosine_similarity = normalizeRows(predicted).dot(normalizeRows(context_vector).T)\n print('neg sample, consine_similarity={0}'.format(cosine_similarity))\n binary_class = sigmoid(cosine_similarity)\n print('neg sample, binary_class={0}'.format(binary_class))\n \n if idx == target:\n cost += binary_class - 1\n else:\n cost += binary_class\n \n dlogits = sigmoid_grad(cosine_similarity)\n #gradPred += dlogits * normalizeRows(context_vector)\n #grad += np.outer(one_hot_target, dlogits * normalizeRows(predicted))\n gradPred += dlogits\n grad += np.outer(one_hot_target, dlogits)\n '''\n ### END YOUR CODE\n\n return cost, gradPred, grad",
"def trainModel( self, featureTrain, classTrain):",
"def train():\n pass",
"def _load_best_model(self) -> None:\n self.trainer.resume()",
"def reset_parameters(self) -> None:\n std = math.sqrt(3 / self.in_features)\n self.weight.data.uniform_(-std, std)\n self.bias.data.uniform_(-std, std)",
"def generate_limittedmodel():\r\n print('Loading model')\r\n model = KeyedVectors.load_word2vec_format(BIN_NAME, binary=True)\r\n print('Model loaded!')\r\n\r\n print('Loading dot products')\r\n dp = np.load(DP_NAME)\r\n print('Dot products loaded')\r\n\r\n print('Filtering vocab')\r\n for name, vocab in list(model.vocab.items()):\r\n if dp[vocab.index] < MAX_DEGREE:\r\n del model.vocab[name]\r\n\r\n il = list(model.vocab.items())\r\n print('Sorting vocab')\r\n il.sort(key=lambda x: x[1].index)\r\n\r\n # Find the indexes of the words that are being kept\r\n print('Generating indexes')\r\n indexes = []\r\n for i in range(0, len(il)):\r\n name, vocab = il[i]\r\n indexes.append(vocab.index)\r\n model.vocab[name].index = i\r\n\r\n print('Modifying model weights')\r\n model.syn0 = model.syn0[indexes]\r\n\r\n print('Saving file')\r\n model.save_word2vec_format(SAVE_NAME, binary=True)",
"def speaker_negative_normalization(features: pd.DataFrame, df: pd.DataFrame):\n data = features.values.copy()\n for speaker in df['speaker'].unique():\n indices = df['speaker'] == speaker\n negative_indices = (df['speaker'] == speaker) & (df['covid'] == False)\n print(negative_indices)\n scaler = StandardScaler()\n scaler.fit(data[negative_indices, :])\n data[indices, :] = scaler.transform(data[indices, :])\n return pd.DataFrame(\n data=data,\n index=features.index,\n columns=features.columns\n )",
"def train(self)->None:",
"def negative(n, W, b):\n\n\treturn make_automata(n, W, b, grid = None)",
"def test_training(self):\n model = PoincareModel(self.data_large, burn_in=0, negative=3)\n old_vectors = np.copy(model.kv.syn0)\n model.train(epochs=2)\n self.assertFalse(np.allclose(old_vectors, model.kv.syn0))",
"def _sample_neg(self, assign_result, num_expected, **kwargs):\n neg_inds = torch.nonzero(assign_result.gt_inds == 0)\n if neg_inds.numel() != 0:\n neg_inds = neg_inds.squeeze(1)\n if len(neg_inds) <= num_expected:\n repeat_ = num_expected // neg_inds.numel()\n return torch.cat((neg_inds.repeat(repeat_), self.random_choice(neg_inds, num_expected % neg_inds.numel())))\n else:\n return self.random_choice(neg_inds, num_expected)",
"def set_to_zero_model_weights(model):\n\n for layer_weigths in model.parameters():\n layer_weigths.data.sub_(layer_weigths.data)",
"def test_does_not_sample_negligible_weight_ppswor(self):\n s = private_sampling.ThresholdSample(1.0,\n private_sampling.PpsworSamplingMethod)\n s.process(\n \"a\",\n math.log(\n FAILURE_PROBABILITY_INVERSE / (FAILURE_PROBABILITY_INVERSE - 1),\n math.e))\n self.assertEmpty(s.elements)",
"def reset(self):\n weight = self.module.weight.data\n self.sensitivity_in = torch.zeros(weight.shape[1]).to(weight.device)\n self._features = torch.Tensor()\n self._current_batch = 1",
"def setup_to_transfer_learn(model):\n for layer in model.layers:\n layer.trainable = False\n\n #model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])",
"def create_model(self):\n self.model = None\n pass",
"def test_burn_in(self):\n model = PoincareModel(self.data, burn_in=1, negative=3)\n original_vectors = np.copy(model.kv.syn0)\n model.train(epochs=0)\n self.assertFalse(np.allclose(model.kv.syn0, original_vectors))",
"def _create_model(self):\n\n model_formula = self.get_model_formula()\n\n removed_observation_index = self._model_dataset.index.isin(self._excluded_observations)\n\n # TODO: Handle error that occurs when all model observations are invalid\n model = smf.ols(model_formula,\n data=self._model_dataset,\n subset=~removed_observation_index,\n missing='drop')\n\n self._model = model",
"def train(self):\n return",
"def backwards_de(self, input_, expected, idx):\r\n\r\n\r\n trial = self._mutant(idx, self.F)\r\n self.set_weights_to_layers(trial)\r\n vec_output = self.forward(input_)\r\n trial_loss = torch.mean(self._objective(vec_output, expected,self.device)).item()\r\n \r\n if trial_loss <= self.past_loss[idx] :\r\n self.population[idx] = trial[:]\r\n self.past_loss[idx] = trial_loss",
"def sample(self, state, model_args, model_kwargs):\n raise NotImplementedError",
"def reset(self):\n checkpoint = torch.load(\n 'model_lr_finder.pth.tar',\n map_location=self.device)\n self.model.load_state_dict(checkpoint['state_dict'])\n self.optimizer.load_state_dict(checkpoint['optimizer'])\n self.model.to(self.device)\n self.model.train()",
"def train_model(self):\n ### Early Stop Mechanism\n loss = previous_loss = float(\"inf\")\n patience_left = self.config.patience\n ### Early Stop Mechanism\n\n self.generator = Generator(self.model.config, training_strategy=self.training_strategy)\n self.evaluator = Evaluator(model=self.model, data_type=self.teston, debug=self.debug)\n\n if self.config.loadFromData:\n self.load_model()\n \n for cur_epoch_idx in range(self.config.epochs):\n print(\"Epoch[%d/%d]\"%(cur_epoch_idx,self.config.epochs))\n loss = self.train_model_epoch(cur_epoch_idx)\n self.test(cur_epoch_idx)\n\n ### Early Stop Mechanism\n ### start to check if the loss is still decreasing after an interval. \n ### Example, if early_stop_epoch == 50, the trainer will check loss every 50 epoche.\n ### TODO: change to support different metrics.\n if ((cur_epoch_idx + 1) % self.config.early_stop_epoch) == 0: \n if patience_left > 0 and previous_loss <= loss:\n patience_left -= 1\n print('%s more chances before the trainer stops the training. (prev_loss, curr_loss): (%.f, %.f)' % \\\n (patience_left, previous_loss, loss))\n\n elif patience_left == 0 and previous_loss <= loss:\n self.evaluator.result_queue.put(Evaluator.TEST_BATCH_EARLY_STOP)\n break\n else:\n patience_left = self.config.patience\n\n previous_loss = loss\n ### Early Stop Mechanism\n\n self.generator.stop()\n self.evaluator.save_training_result(self.training_results)\n self.evaluator.stop()\n\n if self.config.save_model:\n self.save_model()\n\n if self.config.disp_result:\n self.display()\n\n if self.config.disp_summary:\n self.config.summary()\n self.config.summary_hyperparameter(self.model.model_name)\n\n self.export_embeddings()\n\n return loss",
"def _sample_neg(self, assign_result, num_expected, **kwargs):\n neg_inds = torch.nonzero(assign_result.gt_inds == 0)\n if neg_inds.numel() != 0:\n neg_inds = neg_inds.squeeze(1)\n if len(neg_inds) <= num_expected:\n return neg_inds\n else:\n return self.random_choice(neg_inds, num_expected)",
"def init_emb(self):\r\n initrange = 0.5 / self.embedding_dim\r\n self.embeddings.weight.data.uniform_(-initrange, initrange)\r\n self.affine.weight.data.uniform_(-0, 0)\r\n self.affine.bias.data.zero_()",
"def add_negative_samples(skipgram_data, unigrams_table, neg_examples_size=5):\n sg_neg_examples = []\n total_data = len(skipgram_data)\n for i, sg in tqdm(enumerate(skipgram_data), desc=\"Processing neg. samples ({} in total)\".format((total_data-1)),\n unit= \" neg. samples\"):\n for gram in sg:\n gram += negative_sampling(word_input=gram[0], target=gram[1],\n unigrams_table=unigrams_table, neg_examples_size=neg_examples_size)\n sg_neg_examples.append(gram)\n return sg_neg_examples",
"def E_step_precompute(self, model_params, my_suff_stat, my_data):",
"def _sample_free_negative(self, kit_mask):\n max_val = self._H * self._W\n num_neg = int(100 * self._sample_ratio)\n negative_indices = []\n while len(negative_indices) < num_neg:\n negative_indices.append(np.random.randint(0, max_val))\n negative_indices = np.vstack(np.unravel_index(negative_indices, (self._H, self._W))).T\n idxs = np.random.choice(np.arange(len(kit_mask)), size=30, replace=False)\n inside = kit_mask[idxs]\n negative_indices = np.vstack([negative_indices, inside])\n return negative_indices",
"def loadtrainData_undersampling():\n train = []\n fileIn = open(PATH + 'traindata_Subtask4.txt')\n for line in fileIn.readlines():\n lineArr = line.strip().split()\n train.append([float(lineArr[i]) for i in range(len(lineArr))])\n\n pos = []\n neg = []\n for i in train:\n if i[-1] == 1.0:\n pos.append(i)\n else:\n neg.append(i)\n slice1 = random.sample(neg, len(pos))\n data = pos + slice1\n train_x = []\n train_y = []\n y = []\n for line in data:\n train_x.append([float(line[i]) for i in range(len(line) - 1)])\n y.append([int(line[-1])])\n for i in range(len(y)):\n train_y.append(y[i][0])\n return np.mat(train_x), np.mat(train_y).transpose()",
"def __init__(self,\r\n model_class=ModelLinear,\r\n loss_func=torch.nn.BCELoss(reduction='none'),\r\n scoring_func=None,\r\n epochs=1000, batch_size=-1,\r\n adam=False,\r\n lr=1e-1, momentum=0.9,\r\n l1_reg=0, l2_reg=0,\r\n weighted_samples=False,\r\n gpu=True, used_gpu=0, sample_gpu=False,\r\n verbose=0,\r\n **kwargs):\r\n super().__init__()\r\n\r\n self.model_class = model_class\r\n self.loss_func = loss_func\r\n self.scoring_func = scoring_func\r\n self.epochs = epochs\r\n self.batch_size = batch_size\r\n self.adam = adam\r\n self.lr = lr\r\n self.momentum = momentum\r\n self.l1_reg = l1_reg\r\n self.l2_reg = l2_reg\r\n self.weighted_samples = weighted_samples\r\n self.gpu = gpu\r\n self.used_gpu = used_gpu\r\n self.sample_gpu = sample_gpu\r\n self.verbose = verbose\r\n\r\n if kwargs:\r\n # additional arguments keys stored for use within fit()\r\n self.additional_args = list(kwargs)\r\n # additional arguments stored as properties for cross_val\r\n self.__dict__.update(kwargs)\r\n else:\r\n self.additional_args = []\r\n\r\n if verbose > 1:\r\n print(\"Model will be instanciated using the following arguments:\",\r\n self.__dict__)",
"def run_negative():\n\tdef cy1(x, y): return y[:,0] - 2.5 # y < 2.5\n\tdef cy2(x, y): return 3 - y[:,0] # y > 3\n\tdef cx1(x, y): return -0.3 - x[:,0] # x > -0.3\n\tdef cx2(x, y): return x[:,0] - 0.3 # x < 0.3\n\n\tdef addons():\n\t\tdom = np.arange(-0.3, 0.3, 0.05)\n\t\tplt.fill_between(dom, 3.0, plt.ylim()[1], facecolor='#E41A1C', alpha=0.5, zorder=101)\n\t\tplt.fill_between(dom, plt.ylim()[0], 2.5, facecolor='#E41A1C', alpha=0.5, zorder=101)\n\n\tbnn = BNNSVGDRegressor(uid=\"bnn-negative-eg\", configfile=\"configs/bnn-negative-eg.json\")\n\tbnn.load(**toy1())\n\tbnn.add_negative_constraint((-5.0, 5.0), [cy1, cx1, cx2])\n\tbnn.add_negative_constraint((-5.0, 5.0), [cy2, cx1, cx2])\n\tbnn.infer()\n\tbnn.plot_pp(plot_title=\"Predictive Posterior Plot\", domain=np.arange(-5, 5, 0.05), ylims=(-9, 7), addons=addons)",
"def test_does_not_sample_negligible_weight_priority(self):\n s = private_sampling.ThresholdSample(\n 1.0, private_sampling.PrioritySamplingMethod)\n s.process(\"a\", 1.0 / FAILURE_PROBABILITY_INVERSE)\n self.assertEmpty(s.elements)",
"def self_training(args):\n\n print('load pre-trained model from [%s]' % args.load_model, file=sys.stderr)\n params = torch.load(args.load_model, map_location=lambda storage, loc: storage)\n vocab = params['vocab']\n transition_system = params['transition_system']\n saved_args = params['args']\n saved_state = params['state_dict']\n\n # transfer arguments\n saved_args.cuda = args.cuda\n saved_args.save_to = args.save_to\n saved_args.train_file = args.train_file\n saved_args.unlabeled_file = args.unlabeled_file\n saved_args.dev_file = args.dev_file\n saved_args.load_decode_results = args.load_decode_results\n args = saved_args\n\n update_args(args)\n\n model = Parser(saved_args, vocab, transition_system)\n model.load_state_dict(saved_state)\n\n if args.cuda: model = model.cuda()\n model.train()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)\n\n print('load unlabeled data [%s]' % args.unlabeled_file, file=sys.stderr)\n unlabeled_data = Dataset.from_bin_file(args.unlabeled_file)\n\n print('load decoding results of unlabeled data [%s]' % args.load_decode_results, file=sys.stderr)\n decode_results = pickle.load(open(args.load_decode_results))\n\n labeled_data = Dataset.from_bin_file(args.train_file)\n dev_set = Dataset.from_bin_file(args.dev_file)\n\n print('Num. examples in unlabeled data: %d' % len(unlabeled_data), file=sys.stderr)\n assert len(unlabeled_data) == len(decode_results)\n self_train_examples = []\n for example, hyps in zip(unlabeled_data, decode_results):\n if hyps:\n hyp = hyps[0]\n sampled_example = Example(idx='self_train-%s' % example.idx,\n src_sent=example.src_sent,\n tgt_code=hyp.code,\n tgt_actions=hyp.action_infos,\n tgt_ast=hyp.tree)\n self_train_examples.append(sampled_example)\n print('Num. self training examples: %d, Num. 
labeled examples: %d' % (len(self_train_examples), len(labeled_data)),\n file=sys.stderr)\n\n train_set = Dataset(examples=labeled_data.examples + self_train_examples)\n\n print('begin training, %d training examples, %d dev examples' % (len(train_set), len(dev_set)), file=sys.stderr)\n print('vocab: %s' % repr(vocab), file=sys.stderr)\n\n epoch = train_iter = 0\n report_loss = report_examples = 0.\n history_dev_scores = []\n num_trial = patience = 0\n while True:\n epoch += 1\n epoch_begin = time.time()\n\n for batch_examples in train_set.batch_iter(batch_size=args.batch_size, shuffle=True):\n batch_examples = [e for e in batch_examples if len(e.tgt_actions) <= args.decode_max_time_step]\n\n train_iter += 1\n optimizer.zero_grad()\n\n loss = -model.score(batch_examples)\n # print(loss.data)\n loss_val = torch.sum(loss).data[0]\n report_loss += loss_val\n report_examples += len(batch_examples)\n loss = torch.mean(loss)\n\n loss.backward()\n\n # clip gradient\n if args.clip_grad > 0.:\n grad_norm = torch.nn.utils.clip_grad_norm(model.parameters(), args.clip_grad)\n\n optimizer.step()\n\n if train_iter % args.log_every == 0:\n print('[Iter %d] encoder loss=%.5f' %\n (train_iter,\n report_loss / report_examples),\n file=sys.stderr)\n\n report_loss = report_examples = 0.\n\n print('[Epoch %d] epoch elapsed %ds' % (epoch, time.time() - epoch_begin), file=sys.stderr)\n # model_file = args.save_to + '.iter%d.bin' % train_iter\n # print('save model to [%s]' % model_file, file=sys.stderr)\n # model.save(model_file)\n\n # perform validation\n print('[Epoch %d] begin validation' % epoch, file=sys.stderr)\n eval_start = time.time()\n eval_results = evaluation.evaluate(dev_set.examples, model, args, verbose=True)\n dev_acc = eval_results['accuracy']\n print('[Epoch %d] code generation accuracy=%.5f took %ds' % (epoch, dev_acc, time.time() - eval_start), file=sys.stderr)\n is_better = history_dev_scores == [] or dev_acc > max(history_dev_scores)\n history_dev_scores.append(dev_acc)\n\n if is_better:\n patience = 0\n model_file = args.save_to + '.bin'\n print('save currently the best model ..', file=sys.stderr)\n print('save model to [%s]' % model_file, file=sys.stderr)\n model.save(model_file)\n # also save the optimizers' state\n torch.save(optimizer.state_dict(), args.save_to + '.optim.bin')\n elif epoch == args.max_epoch:\n print('reached max epoch, stop!', file=sys.stderr)\n exit(0)\n elif patience < args.patience:\n patience += 1\n print('hit patience %d' % patience, file=sys.stderr)\n\n if patience == args.patience:\n num_trial += 1\n print('hit #%d trial' % num_trial, file=sys.stderr)\n if num_trial == args.max_num_trial:\n print('early stop!', file=sys.stderr)\n exit(0)\n\n # decay lr, and restore from previously best checkpoint\n lr = optimizer.param_groups[0]['lr'] * args.lr_decay\n print('load previously best model and decay learning rate to %f' % lr, file=sys.stderr)\n\n # load model\n params = torch.load(args.save_to + '.bin', map_location=lambda storage, loc: storage)\n model.load_state_dict(params['state_dict'])\n if args.cuda: model = model.cuda()\n\n # load optimizers\n if args.reset_optimizer:\n print('reset optimizer', file=sys.stderr)\n optimizer = torch.optim.Adam(model.inference_model.parameters(), lr=lr)\n else:\n print('restore parameters of the optimizers', file=sys.stderr)\n optimizer.load_state_dict(torch.load(args.save_to + '.optim.bin'))\n\n # set new lr\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n\n # reset patience\n patience = 0",
"def get_negative_data(self):\n negative_df = pd.DataFrame(columns=HeatStrokeDataFiller.important_features, index=np.arange(self.num_negative))\n for field in negative_df.columns:\n parameter_distribution = HeatStrokeDataFiller.negative_default[field]\n negative_df[field].loc[:] = parameter_distribution(self.num_negative)\n return negative_df",
"def __init__(self, reg_penalty='l2', reg_inv=1.0, k_fold=5, random_state=0):\n print(\"Initialize model Logistic Regression\")\n self.reg_penalty = reg_penalty\n self.reg_inv = reg_inv\n self.k_fold = k_fold\n self.random_state = random_state\n self.model = sklearn.linear_model.LogisticRegression(penalty=self.reg_penalty,\n C=self.reg_inv,\n max_iter=1000, \n random_state=self.random_state)",
"def __init__(self, in_features, out_features):\n\n ########################\n # PUT YOUR CODE HERE #\n #######################\n\n self.in_features = in_features\n self.out_features = out_features\n\n self.__MEAN = 0\n self.__STD = 0.0001\n\n self.params = {\n 'weight': np.random.normal(loc=self.__MEAN, scale=self.__STD, size=(out_features, in_features)), \n 'bias': np.zeros(out_features),\n }\n self.grads = {\n 'weight': None, \n 'bias': None,\n }\n\n self.input_cache = None\n ########################\n # END OF YOUR CODE #\n #######################",
"def train():\n # YOUR TRAINING CODE GOES HERE",
"def test_intent_classifier_del_training_samples(self):\n pass",
"def test_negatives(self):\n model = PoincareModel(self.data, negative=5)\n self.assertEqual(len(model._get_candidate_negatives()), 5)",
"def run(self) -> None:\n self.model = self.trainer.train_model(self.model, self.data)",
"def evaluate(self):\n self.training = False",
"def inference_spa(flow_lik,\n flow_post,\n prior,\n simulator,\n optimizer_lik,\n optimizer_post,\n decay_rate_post,\n x_o,\n x_o_batch_post,\n dim_post,\n prob_prior,\n nbr_lik,\n nbr_epochs_lik,\n nbr_post,\n nbr_epochs_post,\n batch_size,\n batch_size_post,\n epochs_hot_start=10,\n validation_fraction=0.1,\n early_stopping=True,\n stop_after_epochs=20):\n\n nbr_iter = len(prob_prior)\n\n print(\"start full training\")\n\n models_lik = []\n models_post = []\n\n scheduler_post = torch.optim.lr_scheduler.ExponentialLR(optimizer=optimizer_post, gamma=decay_rate_post)\n\n for i in range(nbr_iter):\n\n # decay post lr\n if i >= 1 and decay_rate_post > 0:\n scheduler_post.step()\n\n # print iter info\n print(\"Iteration: \" + str(i + 1))\n print(\"optimizer_post_lr: \" + str(scheduler_post.get_last_lr()))\n print(\"prob_prior: \" + str(prob_prior[i]))\n\n # update likelihood model\n\n nbr_lik_prior = int(prob_prior[i] * nbr_lik[i])\n nbr_like_post = int((1 - prob_prior[i]) * nbr_lik[i])\n\n theta_prior = prior.sample(sample_shape=(nbr_lik_prior,))\n\n if nbr_like_post == 0: # this is to avoid concatunate a tensor with grad to the theta tensor\n theta_full = theta_prior\n else:\n theta_post = flow_post.sample(nbr_like_post, context=x_o) # .reshape(1,dim)\n theta_post = theta_post.reshape((nbr_like_post, dim_post))\n # not sure if this is valid.... Is ok since we sample from a mixture\n theta_prior_check = prior.log_prob(theta_post)\n\n # print(theta_prior_check.shape)\n idx_save = (~torch.isinf(theta_prior_check)).nonzero()\n\n # print(idx_save.shape)\n\n if idx_save.shape[0] > 0:\n theta_post = theta_post[idx_save.reshape(-1), :]\n theta_full = torch.cat([theta_prior, theta_post.detach()], dim=0)\n else:\n theta_full = theta_prior\n\n # remove thetas that are outside of prior\n\n x_full = simulator(theta_full)\n\n _train_like(x_full, theta_full, nbr_epochs_lik[i], batch_size, flow_lik, optimizer_lik,\n validation_fraction, early_stopping, stop_after_epochs)\n\n # update posterior model\n\n # 2' step: train posterior model from prior predictive first, only used to get a hot start\n if i == 0:\n _train_post_prior_pred(x_full, theta_full, epochs_hot_start, batch_size, flow_post, optimizer_post,\n validation_fraction)\n # models_post.append(copy.deepcopy(flow_post))\n\n # Sample training data from posterior\n\n _train_post_sim_fly(nbr_post[i], nbr_epochs_post[i], batch_size_post, flow_post, flow_lik, optimizer_post,\n prior, x_o_batch_post, dim_post, x_o, validation_fraction, early_stopping,\n stop_after_epochs)\n\n # save trained model for each iter\n models_lik.append(copy.deepcopy(flow_lik))\n models_post.append(copy.deepcopy(flow_post))\n\n return models_lik, models_post",
"def negative_gradient(self, y, y_pred, **kargs):",
"def prior_sample(self):\n pass",
"def stop_fit(self):\n self._stop_fit = True",
"def add_uniform_random_negatives(\n ds,\n num_nodes,\n num_negs_per_pos,\n):\n negative_sampler = RandomUniformNegativeSampler(num_nodes, num_negs_per_pos)\n return ds.map(\n negative_sampler, deterministic=False, num_parallel_calls=tf.data.AUTOTUNE\n )",
"def test_no_model(self):\n\n with self.assertRaisesRegex(ValueError,\n 'Please provide a model for this generator'):\n self._gen.generate(\n example=self._example,\n model=None,\n dataset=self._dataset,\n config=self._config)",
"def __init__(self):\n # Initializing the Model with the class\n super(Model, self).__init__()\n # torch.nn.Linear applies a Linear transformation. The first parameter is the size of each input sample. The second is the size of the output sample\n self.linear = torch.nn.Linear(1, 1)",
"def model_create(ARGS):\n\n def retain(ARGS):\n \"\"\"Create the model\"\"\"\n\n # Define the constant for model saving\n reshape_size = ARGS.emb_size + ARGS.numeric_size\n if ARGS.allow_negative:\n embeddings_constraint = FreezePadding()\n beta_activation = 'tanh'\n output_constraint = None\n else:\n embeddings_constraint = FreezePadding_Non_Negative()\n beta_activation = 'sigmoid'\n output_constraint = non_neg()\n\n def reshape(data):\n \"\"\"Reshape the context vectors to 3D vector\"\"\"\n return K.reshape(x=data, shape=(K.shape(data)[0], 1, reshape_size))\n\n # Code Input\n codes = L.Input((None, None), name='codes_input')\n inputs_list = [codes]\n # Calculate embedding for each code and sum them to a visit level\n codes_embs_total = L.Embedding(ARGS.num_codes + 1,\n ARGS.emb_size,\n name='embedding'\n # BUG: embeddings_constraint not supported\n # https://github.com/tensorflow/tensorflow/issues/33755\n # ,embeddings_constraint=embeddings_constraint\n )(codes)\n codes_embs = L.Lambda(lambda x: K.sum(x, axis=2))(codes_embs_total)\n # Numeric input if needed\n if ARGS.numeric_size > 0:\n numerics = L.Input((None, ARGS.numeric_size), name='numeric_input')\n inputs_list.append(numerics)\n full_embs = L.concatenate([codes_embs, numerics], name='catInp')\n else:\n full_embs = codes_embs\n\n # Apply dropout on inputs\n full_embs = L.Dropout(ARGS.dropout_input)(full_embs)\n\n # Time input if needed\n if ARGS.use_time:\n time = L.Input((None, 1), name='time_input')\n inputs_list.append(time)\n time_embs = L.concatenate([full_embs, time], name='catInp2')\n else:\n time_embs = full_embs\n\n # Setup Layers\n # This implementation uses Bidirectional LSTM instead of reverse order\n # (see https://github.com/mp2893/retain/issues/3 for more details)\n\n # If training on GPU and Tensorflow use CuDNNLSTM for much faster training\n if glist:\n alpha = L.Bidirectional(L.CuDNNLSTM(ARGS.recurrent_size, return_sequences=True),\n name='alpha')\n beta = L.Bidirectional(L.CuDNNLSTM(ARGS.recurrent_size, return_sequences=True),\n name='beta')\n else:\n alpha = L.Bidirectional(L.LSTM(ARGS.recurrent_size,\n return_sequences=True, implementation=2),\n name='alpha')\n beta = L.Bidirectional(L.LSTM(ARGS.recurrent_size,\n return_sequences=True, implementation=2),\n name='beta')\n\n alpha_dense = L.Dense(1, kernel_regularizer=l2(ARGS.l2))\n beta_dense = L.Dense(ARGS.emb_size + ARGS.numeric_size,\n activation=beta_activation, kernel_regularizer=l2(ARGS.l2))\n\n # Compute alpha, visit attention\n alpha_out = alpha(time_embs)\n alpha_out = L.TimeDistributed(alpha_dense, name='alpha_dense_0')(alpha_out)\n alpha_out = L.Softmax(axis=1)(alpha_out)\n # Compute beta, codes attention\n beta_out = beta(time_embs)\n beta_out = L.TimeDistributed(beta_dense, name='beta_dense_0')(beta_out)\n # Compute context vector based on attentions and embeddings\n c_t = L.Multiply()([alpha_out, beta_out, full_embs])\n c_t = L.Lambda(lambda x: K.sum(x, axis=1))(c_t)\n # Reshape to 3d vector for consistency between Many to Many and Many to One implementations\n contexts = L.Lambda(reshape)(c_t)\n\n # Make a prediction\n contexts = L.Dropout(ARGS.dropout_context)(contexts)\n output_layer = L.Dense(1, activation='sigmoid', name='dOut',\n kernel_regularizer=l2(ARGS.l2), kernel_constraint=output_constraint)\n\n # TimeDistributed is used for consistency\n # between Many to Many and Many to One implementations\n output = L.TimeDistributed(output_layer, name='time_distributed_out')(contexts)\n # Define the model with appropriate inputs\n model = 
Model(inputs=inputs_list, outputs=[output])\n\n return model\n\n # Set Tensorflow to grow GPU memory consumption instead of grabbing all of it at once\n # If there are multiple GPUs set up a multi-gpu model\n # Get available gpus , returns empty list if none\n # glist = get_available_gpus()\n glist = []\n if len(glist) > 1:\n with tf.device('/cpu:0'):\n model = retain(ARGS)\n model_final = make_parallel(model, glist)\n else:\n model_final = retain(ARGS)\n\n # Compile the model - adamax has produced best results in our experiments\n model_final.compile(optimizer='adamax',\n loss='binary_crossentropy',\n #TODO: add AUPRC?\n metrics=[Recall(), specificity,\n SpecificityAtSensitivity(0.5,3),\n SensitivityAtSpecificity(0.5, 3),\n 'accuracy', AUC(), Precision()],\n sample_weight_mode=\"temporal\")\n return model_final",
"def perturb_model(args, model, random_seed, env):\n new_model = ES(env.observation_space.shape[0],\n env.action_space,)\n anti_model = ES(env.observation_space.shape[0],\n env.action_space)\n new_model.load_state_dict(model.state_dict())\n anti_model.load_state_dict(model.state_dict())\n np.random.seed(random_seed)\n for (k, v), (anti_k, anti_v) in zip(new_model.es_params(),\n anti_model.es_params()):\n eps = np.random.normal(0, 1, v.size())\n v += torch.from_numpy(args.sigma*eps).float()\n anti_v += torch.from_numpy(args.sigma*-eps).float()\n return [new_model, anti_model]",
"def train_model(self, *args, **kwargs):\n raise NotImplementedError",
"def __neg__(self):\n return self.from_points(-v for v in self._vectors)",
"def initializeWeightsToZero(self):\n\t\t## YOUR CODE BELOW\n\t\t\n\t\tutil.raiseNotDefined()\n\t\treturn",
"def unwhiten_back(self, sample):\n sample = sample*self.Y_std.unsqueeze(1) + self.Y_mean.unsqueeze(1)\n return sample",
"def unwhiten_back(self, sample):\n sample = sample*self.Y_std.unsqueeze(1) + self.Y_mean.unsqueeze(1)\n return sample",
"def negSamplingCostAndGradient(predicted_vc, target, outputVectors_uk, dataset,\n K=10):\n\n # Sampling of indices is done for you. Do not modify this if you\n # wish to match the autograder and receive points!\n indices = [target]\n indices.extend(getNegativeSamples(target, dataset, K))\n\n cost = 0.0\n sigmd_uoT_vc = sigmoid(np.dot(predicted_vc.reshape(-1), outputVectors_uk[target].T))\n cost += -np.log(sigmd_uoT_vc)\n\n gradPred_dJ_vc = np.zeros_like(predicted_vc)\n gradPred_dJ_vc += (sigmd_uoT_vc - 1) * outputVectors_uk[target]\n\n grad_dJ_uw = np.zeros_like(outputVectors_uk)\n grad_dJ_uw[target:target + 1] = (sigmd_uoT_vc - 1) * predicted_vc\n\n neg_samples = []\n for i in range(K):\n j = dataset.sampleTokenIdx()\n if j == target or (j in neg_samples):\n i -= 1 # if negative sample is same with target or already sampled, then resample.\n continue\n neg_samples.append(j)\n\n sigmd_ukT_vc = sigmoid(-np.dot(predicted_vc.reshape(-1), outputVectors_uk[j].T))\n cost += -np.log(sigmd_ukT_vc) # cost for negative sample\n\n grad_dJ_uw[j:j + 1] = (1 - sigmd_ukT_vc) * predicted_vc # gradient for negative sample\n gradPred_dJ_vc += (1 - sigmd_ukT_vc) * outputVectors_uk[j]\n\n return cost, gradPred_dJ_vc, grad_dJ_uw",
"def test_negative_sampling_self_adversarial_loss(self):\n loss_fct = NSSALoss(margin=1.0, adversarial_temperature=1.0)\n self.assertIs(loss_fct._reduction_method, torch.mean)\n\n pos_scores = torch.tensor([0.0, 0.0, -0.5, -0.5])\n neg_scores = torch.tensor([0.0, 0.0, -1.0, -1.0])\n\n # ≈ result of softmax\n weights = torch.tensor([0.37, 0.37, 0.13, 0.13])\n\n # neg_distances - margin = [-1., -1., 0., 0.]\n # sigmoids ≈ [0.27, 0.27, 0.5, 0.5]\n log_sigmoids = torch.tensor([-1.31, -1.31, -0.69, -0.69])\n intermediate = weights * log_sigmoids\n # sum over the softmax dim as weights sum up to 1\n neg_loss = torch.sum(intermediate, dim=-1)\n\n # pos_distances = [0., 0., 0.5, 0.5]\n # margin - pos_distances = [1. 1., 0.5, 0.5]\n # ≈ result of sigmoid\n # sigmoids ≈ [0.73, 0.73, 0.62, 0.62]\n log_sigmoids = torch.tensor([-0.31, -0.31, -0.48, -0.48])\n pos_loss = torch.mean(log_sigmoids)\n\n # expected_loss ≈ 0.34\n expected_loss = (-pos_loss - neg_loss) / 2.0\n\n loss = loss_fct(pos_scores, neg_scores, weights).item()\n\n self.assertAlmostEqual(expected_loss, 0.77, delta=0.02)\n self.assertAlmostEqual(expected_loss, loss, delta=0.02)",
"def false_neg_shuffle(self, continuum: Continuum) -> None:\n for annotator in continuum.annotators:\n security = np.random.choice(continuum._annotations[annotator])\n # security : if an annotator doesnt have any annotations gamma cant be computed.\n for unit in list(continuum[annotator]):\n if np.random.random() < self.magnitude:\n continuum.remove(annotator, unit)\n if len(continuum._annotations[annotator]) == 0:\n continuum.add(annotator, security.segment, security.annotation)",
"def __init__(self):\n self.model = GaussianNB();\n self.X = iris.data\n self.y = iris.target",
"def test_negative_prediction_rate(self):\n # For the penalty, the default loss is hinge.\n expected_penalty_numerator = np.sum(\n np.maximum(0.0, 1.0 - self._penalty_predictions) * self._penalty_weights\n * self._penalty_predicate)\n expected_penalty_denominator = np.sum(\n self._penalty_weights * self._penalty_predicate)\n expected_penalty_value = (\n expected_penalty_numerator / expected_penalty_denominator)\n\n # For the constraint, the default loss is zero-one.\n expected_constraint_numerator = np.sum(\n (0.5 * (1.0 - np.sign(self._constraint_predictions))) *\n self._constraint_weights * self._constraint_predicate)\n expected_constraint_denominator = np.sum(\n self._constraint_weights * self._constraint_predicate)\n expected_constraint_value = (\n expected_constraint_numerator / expected_constraint_denominator)\n\n actual_expression = binary_rates.negative_prediction_rate(self.context)\n self.check_rates(expected_penalty_value, expected_constraint_value,\n actual_expression)"
] | [
"0.6653699",
"0.6541765",
"0.637076",
"0.62263596",
"0.62053776",
"0.61295056",
"0.6102357",
"0.60013276",
"0.5856466",
"0.5846717",
"0.5828895",
"0.58022124",
"0.57946515",
"0.5791492",
"0.5789991",
"0.5780061",
"0.5766371",
"0.576477",
"0.57344276",
"0.5696231",
"0.569474",
"0.5675584",
"0.56594884",
"0.5659008",
"0.5644614",
"0.5642656",
"0.562742",
"0.56200904",
"0.5609411",
"0.56021196",
"0.56021196",
"0.56021196",
"0.56021196",
"0.56021196",
"0.5601778",
"0.55958635",
"0.55834854",
"0.55829924",
"0.5579438",
"0.5566841",
"0.55581266",
"0.55578995",
"0.5554023",
"0.55469835",
"0.55357754",
"0.5534926",
"0.5534721",
"0.5532452",
"0.55303246",
"0.55290097",
"0.55237526",
"0.55193895",
"0.5516533",
"0.5498065",
"0.5491656",
"0.5482101",
"0.54610246",
"0.5453867",
"0.5447777",
"0.5445565",
"0.5443604",
"0.5435557",
"0.5435014",
"0.5434354",
"0.5421097",
"0.54159755",
"0.5411865",
"0.54088116",
"0.54070973",
"0.540611",
"0.5402395",
"0.5400235",
"0.53931576",
"0.5392259",
"0.5391005",
"0.538777",
"0.53868514",
"0.5371183",
"0.5370253",
"0.5367486",
"0.5362911",
"0.5358848",
"0.5356668",
"0.5354789",
"0.5354564",
"0.534648",
"0.53429145",
"0.5339992",
"0.53299654",
"0.53278726",
"0.53254795",
"0.5319828",
"0.53184885",
"0.5313147",
"0.5313147",
"0.5309223",
"0.5308666",
"0.53069764",
"0.5300814",
"0.5298279"
] | 0.61521417 | 5 |
Negative sampling loss function. | def loss_fn(self, hidden: torch.Tensor, pos_embedded: torch.Tensor, neg_embedded: torch.Tensor,
mask: torch.BoolTensor) -> torch.Tensor:
pos_embedded = pos_embedded[:, None, :] # batch_size * pad_len, 1, embedding_dim
pos_score = self.get_score(pos_embedded, hidden) # batch_size * pad_len
pos_contrib = -F.logsigmoid(pos_score)
neg_score = self.get_score(neg_embedded, hidden) # batch_size * pad_len, neg_count
neg_contrib = -torch.log(1 - torch.sigmoid(neg_score)).mean(dim=-1) # batch_size * pad_len
return torch.masked_select(pos_contrib + neg_contrib, mask).mean() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def negative_gradient(self, y, y_pred, **kargs):",
"def loss(A, Y):\n return A - Y",
"def test_negative_sampling_self_adversarial_loss(self):\n loss_fct = NSSALoss(margin=1.0, adversarial_temperature=1.0)\n self.assertIs(loss_fct._reduction_method, torch.mean)\n\n pos_scores = torch.tensor([0.0, 0.0, -0.5, -0.5])\n neg_scores = torch.tensor([0.0, 0.0, -1.0, -1.0])\n\n # ≈ result of softmax\n weights = torch.tensor([0.37, 0.37, 0.13, 0.13])\n\n # neg_distances - margin = [-1., -1., 0., 0.]\n # sigmoids ≈ [0.27, 0.27, 0.5, 0.5]\n log_sigmoids = torch.tensor([-1.31, -1.31, -0.69, -0.69])\n intermediate = weights * log_sigmoids\n # sum over the softmax dim as weights sum up to 1\n neg_loss = torch.sum(intermediate, dim=-1)\n\n # pos_distances = [0., 0., 0.5, 0.5]\n # margin - pos_distances = [1. 1., 0.5, 0.5]\n # ≈ result of sigmoid\n # sigmoids ≈ [0.73, 0.73, 0.62, 0.62]\n log_sigmoids = torch.tensor([-0.31, -0.31, -0.48, -0.48])\n pos_loss = torch.mean(log_sigmoids)\n\n # expected_loss ≈ 0.34\n expected_loss = (-pos_loss - neg_loss) / 2.0\n\n loss = loss_fct(pos_scores, neg_scores, weights).item()\n\n self.assertAlmostEqual(expected_loss, 0.77, delta=0.02)\n self.assertAlmostEqual(expected_loss, loss, delta=0.02)",
"def compute_loss(self):",
"def negative_gradient(self, y, pred, **kargs):\n return y - expit(pred.ravel())",
"def derivative_loss(self, y, y_pred):\n return y_pred - y",
"def ss_loss_(self, batch):\n raise NotImplementedError",
"def __call__(self, target_labels: List[Tensor], fg_probs: Tensor):\n return super(NegativeSampler, self).__call__(target_labels)",
"def compute_loss(self, obs, returns):",
"def get_loss(self, X, Y):\n return tfe.huber_loss(1.0, X - Y)",
"def loss(self, x, y):\n\n return self.loss_fn(x, y)",
"def loss(self, x, y):\n return x",
"def loss(self, **kwargs):\n pass",
"def get_loss(self, x, y):\n \"*** YOUR CODE HERE question 2 ***\"\n return nn.SquareLoss(self.run(x), y)",
"def __neg__(self):\n return TensorWithIndices(-self._tensor, \n self._con + '_' + self._cov)",
"def gen_loss_orig(self, noise_samples):\n generator_samples = self.gen_model(noise_samples)\n logits_gen = self.disc_model(generator_samples)\n # loss = -tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.zeros(logits_gen.shape), logits=logits_gen))\n loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.ones(logits_gen.shape), logits=logits_gen))\n return loss",
"def loss(self, X, y):\n pass",
"def loss(self, x):\n return self._svi.evaluate_loss(*x)",
"def init_negative_sampler(self, unigram_power=3/4, strategy=\"word2vec\"):\n if strategy == \"word2vec\":\n counts = self.target['dst'].value_counts(normalize=True)\n freq = counts.values ** unigram_power\n self.freq = freq / sum(freq)\n self.dst_idxs = counts.index\n self.dst_neg_sampling = lambda size: np.random.choice(self.dst_idxs, size, replace=True, p=self.freq)\n elif strategy == \"uniform\":\n self.dst_neg_sampling = lambda size: np.random.choice(self.unique_dst, size, replace=True)",
"def metric_loss(self, output, sample, *args, **kwargs):\n loss = self.loss(output, sample, *args, **kwargs)\n return loss.item()",
"def global_uniform_negative_sampling(\n self, num_samples, exclude_self_loops=True, replace=False, etype=None\n ):\n raise NotImplementedError(\n \"global_uniform_negative_sampling not implemented yet\"\n )",
"def gen_loss_wasserstein(self, noise_samples):\n generator_samples = self.gen_model(noise_samples)\n logits_gen = self.disc_model(generator_samples)\n\n loss = -tf.reduce_mean(logits_gen)\n return loss",
"def loss(self, y: torch.Tensor, state: AlgorithmState) -> torch.Tensor:\n\n raise NotImplementedError()",
"def get_loss(self):\n raise NotImplementedError",
"def calculate_loss(self, X, y):\n probs = self.predict(X)\n\n num_examples = X.shape[0]\n\n sub = np.subtract(probs, y)\n abs_sum = np.abs(sub)\n sm = np.sum(abs_sum)\n loss = 1 - sm / num_examples\n print(\"Current loss: [ \" + str(\"{:6.5f}\").format(loss) + \" ]\")\n return loss",
"def get_loss(self, xs, y):\n \"*** YOUR CODE HERE ***\"\n predictedY = self.run(xs)\n return nn.SoftmaxLoss(predictedY, y)\n # return nn.SquareLoss(predictedY, y)",
"def compute_loss(self, **kwargs):\n raise NotImplementedError",
"def compute_loss(self, x, label):\n # Forward propagation\n y_hat = self.forward_pass(x)\n return -np.log(y_hat[label])",
"def loss(self, x, y):\n raise NotImplementedError",
"def get_loss(self, xs, y):\n \"*** YOUR CODE HERE question 4 ***\"\n return nn.SoftmaxLoss(self.run(xs), y)",
"def compute_loss(self, x, gt):\n loss = sum([torch.mean((out - gt)**2) for out in self.forward(x)])\n return loss",
"def get_loss(self, x, y):\n \"*** YOUR CODE HERE ***\"\n predictedY = self.run(x)\n return nn.SquareLoss(predictedY, y)",
"def loss_op(self):\n return self.loss",
"def loss_(self, batch):\n raise NotImplementedError",
"def loss_fun(model: GPModel, params: dict) -> float:\n py = model.module.call(params, train_ds['index_points'])\n return -py.log_prob(train_ds['y'])",
"def calc_loss(self, x: np.ndarray, y: np.ndarray) -> float:\n return self.descent.calc_loss(x, y)",
"def unnormalized_loss(self):\n return 0.5 * la.norm(self.resids) ** 2",
"def loss(self, forward, rating):\n return self.loss_fn(forward, rating.float().view(-1))",
"def get_loss(self, x, y):\n \"*** YOUR CODE HERE question 3 ***\"\n return nn.SoftmaxLoss(self.run(x), y)",
"def loss(self, y_pred=None, y_true=None):\n ll = -0.5 * self.const - np.log(self.sigma_y) - 0.5 * (1. / self.sigma_y ** 2) * ((y_pred - y_true) ** 2)\n return -ll.sum(dim=0)",
"def get_loss(self, xs, y):\n \"*** YOUR CODE HERE ***\"\n y_pred = self.run(xs)\n return nn.SoftmaxLoss(y_pred,y)",
"def loss_fn(gr_truth, pred):\n return 100 * dice_loss(pred, gr_truth) + softmax_weighted_loss(pred, gr_truth)",
"def _get_loss(self):\n raise NotImplementedError",
"def ssim_loss(self, x, y):\n loss = (1.0 - self.SSIM(x, y)) * 20\n return loss",
"def loss(self, y_pred=None, y_true=None):\n n = y_pred.shape[0]\n ahat = transform(self.ahat)\n bhat = transform(self.bhat)\n return -1 * (-0.5 * n * self.const + 0.5 * n * (torch.digamma(ahat) - torch.log(bhat)) \\\n - 0.5 * (ahat/bhat) * ((y_pred - y_true) ** 2).sum())",
"def get_loss_fn(self):\n raise NotImplementedError()",
"def compute_loss(self, *args, **kwargs):\n raise NotImplementedError",
"def get_loss(self, samples):\n return self.run_on_samples(self.loss.eval, samples)",
"def test_false_negative_rate(self):\n # For the penalty, the default loss is hinge.\n expected_penalty_numerator = np.sum(\n np.maximum(\n 0.0, 1.0 - self._penalty_predictions) * (self._penalty_labels > 0.0)\n * self._penalty_weights * self._penalty_predicate)\n expected_penalty_denominator = np.sum(\n (self._penalty_labels > 0.0) * self._penalty_weights *\n self._penalty_predicate)\n expected_penalty_value = (\n expected_penalty_numerator / expected_penalty_denominator)\n\n # For the constraint, the default loss is zero-one.\n expected_constraint_numerator = np.sum(\n (0.5 * (1.0 - np.sign(self._constraint_predictions))) *\n (self._constraint_labels > 0.0) * self._constraint_weights *\n self._constraint_predicate)\n expected_constraint_denominator = np.sum(\n (self._constraint_labels > 0.0) * self._constraint_weights *\n self._constraint_predicate)\n expected_constraint_value = (\n expected_constraint_numerator / expected_constraint_denominator)\n\n actual_expression = binary_rates.false_negative_rate(self.context)\n self.check_rates(expected_penalty_value, expected_constraint_value,\n actual_expression)",
"def _neg_loss(outputs: torch.Tensor, targets: torch.Tensor):\n pos_inds = targets.eq(1).float()\n neg_inds = targets.lt(1).float()\n\n neg_weights = torch.pow(1 - targets, 4)\n\n loss = 0\n\n pos_loss = torch.log(outputs) * torch.pow(1 - outputs, 2) * pos_inds\n neg_loss = torch.log(1 - outputs) * torch.pow(outputs, 2) * neg_weights * neg_inds\n\n num_pos = pos_inds.float().sum()\n pos_loss = pos_loss.sum()\n neg_loss = neg_loss.sum()\n\n if num_pos == 0:\n loss = loss - neg_loss\n else:\n loss = loss - (pos_loss + neg_loss) / num_pos\n return loss",
"def loss(self, dataset=None, loss=None, training=None):\n # Recover the defaults, if missing\n dataset, loss = self._resolve_defaults(trainset=dataset, loss=loss)\n # Sample the train batch\n inputs, targets = dataset.sample(self._config)\n # Guess whether computation is for training, if necessary\n if training is None:\n training = torch.is_grad_enabled()\n # Forward pass\n return loss(self.run(inputs), targets, self._params)",
"def s_neg(self):\n running_total = 0\n for i in range(self.prob.num):\n if self.alphas[i] > 1e-5 > self.prob.C - self.deltas[i] and self.prob.Y[i] == -1:\n ayxx = 0\n for j in range(self.prob.num):\n ayxx += self.alphas[j] * self.prob.Y[j] * self.prob.xkernel(self.prob.X[j], self.prob.X[i])\n running_total += -1 - ayxx\n return running_total",
"def __neg__(self):\n return self.coeff_mul(-1)",
"def _initLoss(self):\n\n return torch.nn.MSELoss()",
"def compute_loss(self, o, y):\n if self.regression:\n return (o - y)**2\n else:\n if y > 0:\n return -np.log(o)\n else:\n return -np.log(1-o)",
"def genLoss(self, *data):\r\n _, (x_unlab, _) = data\r\n z = self.getInputNoise(self.hypers['ul_BS'])\r\n fake_logits = self.D(self.G(z))\r\n g_losses = -1*logOneMinusSoftmax(fake_logits)[:,self.D.numClasses-1]\r\n return torch.mean(g_losses)",
"def get_loss(self, x, y):\n \"*** YOUR CODE HERE ***\"\n #make your predictions using run\n #compute loss nn.squareloss\n y_pred = self.run(x)\n return nn.SquareLoss(y_pred,y)",
"def get_loss(fidelity):\n\n return 1 - np.sqrt(fidelity)",
"def compute_loss(self, x, y):\n\n self.batch_size = x.shape[0]\n self.x = x\n self.y = y\n self.soft = self.softmax(x) + 10**(-11)\n out = np.zeros(self.batch_size)\n for i in range(self.batch_size):\n out[i] = -(y[i] @ np.log(self.soft[i]))\n\n return out",
"def backward_loss(self, loss: torch.Tensor, model: Model, optimizer: Optimizer) -> None:\n with amp.scale_loss(loss, optimizer) as scaled_loss:\n scaled_loss.backward()",
"def backward_loss(self, loss: torch.Tensor, model: Model, optimizer: Optimizer) -> None:\n with amp.scale_loss(loss, optimizer) as scaled_loss:\n scaled_loss.backward()",
"def get_loss(self, x, y):\n \"*** YOUR CODE HERE ***\"\n y_pred = self.run(x)\n return nn.SoftmaxLoss(y_pred,y)",
"def get_loss(self, xs, y):\n return nn.SoftmaxLoss(self.run(xs), y)",
"def loss(y, y_pred):\n return 0.5 * np.linalg.norm(y_pred - y) ** 2",
"def test_negative_prediction_rate(self):\n # For the penalty, the default loss is hinge.\n expected_penalty_numerator = np.sum(\n np.maximum(0.0, 1.0 - self._penalty_predictions) * self._penalty_weights\n * self._penalty_predicate)\n expected_penalty_denominator = np.sum(\n self._penalty_weights * self._penalty_predicate)\n expected_penalty_value = (\n expected_penalty_numerator / expected_penalty_denominator)\n\n # For the constraint, the default loss is zero-one.\n expected_constraint_numerator = np.sum(\n (0.5 * (1.0 - np.sign(self._constraint_predictions))) *\n self._constraint_weights * self._constraint_predicate)\n expected_constraint_denominator = np.sum(\n self._constraint_weights * self._constraint_predicate)\n expected_constraint_value = (\n expected_constraint_numerator / expected_constraint_denominator)\n\n actual_expression = binary_rates.negative_prediction_rate(self.context)\n self.check_rates(expected_penalty_value, expected_constraint_value,\n actual_expression)",
"def loss(self) -> KernelLoss:\n return self._loss",
"def loss(self):\n return 'mse'",
"def get_loss(self, x, y):\n \"*** YOUR CODE HERE ***\"\n predictedY = self.run(x)\n return nn.SoftmaxLoss(predictedY, y)",
"def mse_loss(self, x, y):\n loss = tf.reduce_mean(tf.square(x - y))\n return loss",
"def neg_sampling_transform(data):\n train_neg_edge_index = negative_sampling(\n edge_index=data.train_pos_edge_index, num_nodes=data.num_nodes,\n num_neg_samples=data.train_pos_edge_index.size(1))\n data.train_edge_index = torch.cat(\n [data.train_pos_edge_index, train_neg_edge_index], dim=-1)\n data.train_edge_label = create_link_label(data.train_pos_edge_index,\n train_neg_edge_index)\n\n return data",
"def get_loss(self, Loss, results, inputs, device):\n return",
"def negative_sampling(self):\n \n self.train_arr = []\n sample_list = np.random.choice(list(range(self.item_count)), size = 10 * len(self.interactions) * self.num_ns)\n \n sample_idx = 0\n for user, pos_item, _ in self.interactions:\n ns_count = 0\n \n while True:\n neg_item = sample_list[sample_idx]\n if not is_visited(self.rating_mat, user, neg_item):\n self.train_arr.append((user, pos_item, neg_item))\n sample_idx += 1\n ns_count += 1\n if ns_count == self.num_ns:\n break\n \n sample_idx += 1",
"def ls_generator_loss(scores_fake):\r\n loss = torch.mean((scores_fake - 1) ** 2) / 2\r\n return loss",
"def value_loss(self, y_true, y_pred):\n loss = -K.mean(y_pred * y_true, axis=-1)\n return loss",
"def get_bprop_neg(self):\n neg_grad = P.Neg()\n\n def bprop(x, out, dout):\n dx = neg_grad(dout)\n return (dx,)\n return bprop",
"def __loss(self, h, y):\n return (-y*np.log(h)-(1-y)*np.log(1-h)).mean()",
"def get_negative_sample(context, num, prob, Gn):\n\tnegative_list = []\n\twhile len(negative_list) < Gn:\n\t\tnegative_sample = np.random.choice(num, p=prob.ravel())\n\t\tif negative_sample != context:\n\t\t\tnegative_list.append(negative_sample)\n\t\telse:\n\t\t\tpass\n\treturn np.array([negative_list])",
"def hyperopt_loss_function(results: DataFrame, trade_count: int,\n *args, **kwargs) -> float:\n total_profit = results['profit_abs'].sum()\n return -1 * total_profit",
"def single_loss(self):\n\n return self.distribution.rvs()",
"def loss(self, x: np.ndarray, y: np.ndarray, **kwargs) -> np.ndarray:\n raise NotImplementedError",
"def loss(self, X, labels):\n features = self.get_conv_features(X)\n loss = blah\n return loss",
"def get_negative_samples(self, context, sample_size=5) -> np.array:\n while True:\n # Get a batch from the shuffled table\n neg_sample = self.neg_table[self.negative_idx:self.negative_idx + sample_size]\n\n # Update negative index\n self.negative_idx = (self.negative_idx + sample_size) % len(self.neg_table)\n\n # Check if batch insufficient\n if len(neg_sample) != sample_size:\n neg_sample = np.concatenate((neg_sample, self.neg_table[:self.negative_idx]))\n\n # Check if context in negative sample\n if not context in neg_sample:\n return neg_sample",
"def calculate_loss(self, output, target, **kwargs):\n ##dont do aggregation\n raise NotImplementedError",
"def calculate_loss(self, output, target, **kwargs):\n ##dont do aggregation\n raise NotImplementedError",
"def loss(self, X, labels):\n features = self.get_conv_feats(X)\n loss = blah\n return loss",
"def PSNRLoss(y_true, y_pred):\n return -10. * K.log(K.mean(K.square(y_pred - y_true))) / K.log(10.)",
"def test_true_negative_rate(self):\n # For the penalty, the default loss is hinge.\n expected_penalty_numerator = np.sum(\n np.maximum(0.0, 1.0 - self._penalty_predictions) *\n (self._penalty_labels <= 0.0) * self._penalty_weights *\n self._penalty_predicate)\n expected_penalty_denominator = np.sum(\n (self._penalty_labels <= 0.0) * self._penalty_weights *\n self._penalty_predicate)\n expected_penalty_value = (\n expected_penalty_numerator / expected_penalty_denominator)\n\n # For the constraint, the default loss is zero-one.\n expected_constraint_numerator = np.sum(\n (0.5 * (1.0 - np.sign(self._constraint_predictions))) *\n (self._constraint_labels <= 0.0) * self._constraint_weights *\n self._constraint_predicate)\n expected_constraint_denominator = np.sum(\n (self._constraint_labels <= 0.0) * self._constraint_weights *\n self._constraint_predicate)\n expected_constraint_value = (\n expected_constraint_numerator / expected_constraint_denominator)\n\n actual_expression = binary_rates.true_negative_rate(self.context)\n self.check_rates(expected_penalty_value, expected_constraint_value,\n actual_expression)",
"def tversky_loss(yhat, ytrue):\n return torch.mean(1 - tversky_index(yhat, ytrue))",
"def get_nobackprop_loss(self) -> Dict[str, tt.Tensor]:\n return {k: dy.nobackprop(v) for k, v in self.expr_factors.items()}",
"def loss_fn(self, pred: Tensor, true: Tensor) -> Tensor:\n pass",
"def loss_compact(y, a):\n return -1 * (y * np.log10(a) + (1 - y) * np.log10(1 - a))",
"def negative_mse ( target_array ):\n return -1 * mse ( target_array )\n # End negative_mse()",
"def dice_coef_loss(y_true, y_pred):\n return -dice_coef(y_true, y_pred, 1)",
"def loss(self):\n return self._loss",
"def _generator_loss(self, y_hat):\n\n l = -tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels = tf.zeros(tf.shape(y_hat)),logits = y_hat ))\n print('generatorloss shape',tf.shape(l))\n return l",
"def configure_loss_fn(self) -> nn.Module:\n pass",
"def negSamplingCostAndGradient(predicted, target, outputVectors, dataset,\n K=10):\n\n # Sampling of indices is done for you. Do not modify this if you\n # wish to match the autograder and receive points!\n indices = [target]\n indices.extend(getNegativeSamples(target, dataset, K))\n\n ### YOUR CODE HERE\n grad = np.zeros_like(outputVectors)\n gradPred = np.zeros_like(predicted)\n cost = 0.0\n probability = 0.0\n for sample_idx in indices:\n similarity = outputVectors[sample_idx].dot(predicted.T)\n probability = sigmoid(similarity) # squash to 0 ~ 1\n if sample_idx == target: # positive sample\n #p = sigmoid(outputVectors[sample_idx].dot(predicted.T))\n cost += -np.log(sigmoid(similarity))\n else: # negative sample\n #p = sigmoid(-outputVectors[sample_idx].dot(predicted.T))\n cost += -np.log(sigmoid(-similarity)) # deduction from reference 2.\n \n if sample_idx == target:\n grad[sample_idx, :] += (probability - 1) * predicted\n gradPred += (probability - 1) * outputVectors[sample_idx]\n else:\n grad[sample_idx, :] += probability * predicted\n gradPred += probability * outputVectors[sample_idx]\n '''\n V, D = outputVectors.shape\n one_hot_target = np.zeros(V)\n one_hot_target[target] = 1\n cost = 0\n gradPred = np.zeros_like(predicted)\n grad = np.zeros_like(outputVectors)\n \n for idx in indices:\n context_vector = outputVectors[idx] # embedding vector (1, D)\n cosine_similarity = normalizeRows(predicted).dot(normalizeRows(context_vector).T)\n print('neg sample, consine_similarity={0}'.format(cosine_similarity))\n binary_class = sigmoid(cosine_similarity)\n print('neg sample, binary_class={0}'.format(binary_class))\n \n if idx == target:\n cost += binary_class - 1\n else:\n cost += binary_class\n \n dlogits = sigmoid_grad(cosine_similarity)\n #gradPred += dlogits * normalizeRows(context_vector)\n #grad += np.outer(one_hot_target, dlogits * normalizeRows(predicted))\n gradPred += dlogits\n grad += np.outer(one_hot_target, dlogits)\n '''\n ### END YOUR CODE\n\n return cost, gradPred, grad",
"def negSamplingCostAndGradient(predicted_vc, target, outputVectors_uk, dataset,\n K=10):\n\n # Sampling of indices is done for you. Do not modify this if you\n # wish to match the autograder and receive points!\n indices = [target]\n indices.extend(getNegativeSamples(target, dataset, K))\n\n cost = 0.0\n sigmd_uoT_vc = sigmoid(np.dot(predicted_vc.reshape(-1), outputVectors_uk[target].T))\n cost += -np.log(sigmd_uoT_vc)\n\n gradPred_dJ_vc = np.zeros_like(predicted_vc)\n gradPred_dJ_vc += (sigmd_uoT_vc - 1) * outputVectors_uk[target]\n\n grad_dJ_uw = np.zeros_like(outputVectors_uk)\n grad_dJ_uw[target:target + 1] = (sigmd_uoT_vc - 1) * predicted_vc\n\n neg_samples = []\n for i in range(K):\n j = dataset.sampleTokenIdx()\n if j == target or (j in neg_samples):\n i -= 1 # if negative sample is same with target or already sampled, then resample.\n continue\n neg_samples.append(j)\n\n sigmd_ukT_vc = sigmoid(-np.dot(predicted_vc.reshape(-1), outputVectors_uk[j].T))\n cost += -np.log(sigmd_ukT_vc) # cost for negative sample\n\n grad_dJ_uw[j:j + 1] = (1 - sigmd_ukT_vc) * predicted_vc # gradient for negative sample\n gradPred_dJ_vc += (1 - sigmd_ukT_vc) * outputVectors_uk[j]\n\n return cost, gradPred_dJ_vc, grad_dJ_uw",
"def standard_generator_loss(disc_gen):\n loss_obj = tf.keras.losses.BinaryCrossentropy(from_logits=True)\n return loss_obj(tf.ones_like(disc_gen), disc_gen)",
"def get_negative(self, index=None, rng=None):\n\n if index is None:\n # Random negative\n if self._negative_pool is None:\n self._preselect_negatives(1, rng=rng)\n\n # sample sequentially from the presampled pool\n tr = self._negative_pool.iloc[self._negative_idx]\n self._negative_idx += 1\n\n # resample the pool once we reach the end\n if self._negative_idx == len(self._negative_pool):\n self._negative_pool = None\n else:\n # the negative pool must have been initialized\n if self._negative_pool is None:\n raise MissingNegativePool((\n 'A presampled negative pool does not exist for this '\n 'target, but it is required to access the specific '\n 'negative index={}. Did you forget to call '\n '_preselect_negatives?'\n ).format(index))\n tr = self._negative_pool.iloc[index]\n return tr",
"def getLoss(self, x_test, t_test):\n x_t = Variable(x_test, requires_grad=False)\n #Feed inputes into neural network\n t_pred = self.model(x_t)\n #Now lets compute out loss\n loss = self.loss_fn(t_pred, t_test)\n return loss"
] | [
"0.71352494",
"0.70853186",
"0.7057606",
"0.66854465",
"0.6680998",
"0.666433",
"0.6643865",
"0.65241724",
"0.65200275",
"0.65122724",
"0.6505412",
"0.6476094",
"0.6475439",
"0.64674455",
"0.6447396",
"0.6428077",
"0.6416268",
"0.64070994",
"0.63982755",
"0.6386199",
"0.6369115",
"0.63603085",
"0.63390297",
"0.6322059",
"0.6318505",
"0.6298779",
"0.628736",
"0.62864846",
"0.6276517",
"0.6266169",
"0.6262509",
"0.62576526",
"0.6255952",
"0.6241143",
"0.623567",
"0.62324053",
"0.62207276",
"0.62193024",
"0.62164736",
"0.6207838",
"0.6205262",
"0.61954314",
"0.61944735",
"0.61882",
"0.6187095",
"0.6180016",
"0.6177939",
"0.61751914",
"0.6168245",
"0.6165842",
"0.6165742",
"0.61585444",
"0.6149686",
"0.61328954",
"0.6125942",
"0.6121112",
"0.61204094",
"0.611982",
"0.6119572",
"0.61191416",
"0.61191416",
"0.61181664",
"0.61088836",
"0.6091726",
"0.60914594",
"0.60570425",
"0.6030092",
"0.60256827",
"0.6009721",
"0.59955275",
"0.5988323",
"0.5981104",
"0.5979122",
"0.5969323",
"0.59689885",
"0.59561664",
"0.5953374",
"0.595151",
"0.5950413",
"0.59467876",
"0.59402233",
"0.59271556",
"0.592282",
"0.592282",
"0.59200615",
"0.59197503",
"0.5917251",
"0.5914354",
"0.59131384",
"0.59125715",
"0.5906385",
"0.59062684",
"0.5906095",
"0.5905931",
"0.5904431",
"0.5899943",
"0.5899858",
"0.5897476",
"0.58960676",
"0.5892576",
"0.5890128"
] | 0.0 | -1 |
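Both `negSamplingCostAndGradient` variants above implement the same skip-gram negative-sampling objective, J = -log σ(u_oᵀv_c) - Σ_k log σ(-u_kᵀv_c), and the quickest way to validate gradient code of this kind is a finite-difference check. Below is a minimal self-contained sketch; the `sigmoid` helper, toy shapes, and index choices are illustrative assumptions, not taken from the dataset.

```python
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def neg_sampling_cost(v_c, target, U, neg_indices):
    # J = -log sigmoid(u_o . v_c) - sum_k log sigmoid(-u_k . v_c)
    cost = -np.log(sigmoid(U[target].dot(v_c)))
    for k in neg_indices:
        cost += -np.log(sigmoid(-U[k].dot(v_c)))
    return cost

rng = np.random.RandomState(0)
v_c = rng.randn(3)        # center-word vector ("predicted")
U = rng.randn(5, 3)       # output vectors, one row per vocabulary word
target, neg = 1, [0, 3, 4]

# Analytic gradient w.r.t. v_c, mirroring the update rules in the snippets above.
grad = (sigmoid(U[target].dot(v_c)) - 1) * U[target]
for k in neg:
    grad += sigmoid(U[k].dot(v_c)) * U[k]

# Central finite differences should agree with the analytic gradient.
eps = 1e-6
num = np.zeros_like(v_c)
for i in range(v_c.size):
    d = np.zeros_like(v_c)
    d[i] = eps
    num[i] = (neg_sampling_cost(v_c + d, target, U, neg)
              - neg_sampling_cost(v_c - d, target, U, neg)) / (2 * eps)
assert np.allclose(grad, num, atol=1e-6)
```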
cast(itkLightObject obj) -> itkTernaryAddImageFilterID2ID2ID2ID2_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF22IVF22 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF22IVF22_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)"
] | [
"0.83064026",
"0.8255792",
"0.8157398",
"0.81537783",
"0.8102101",
"0.8073973",
"0.80381954",
"0.800663",
"0.7993588",
"0.79927564",
"0.7985042",
"0.79630977",
"0.79616165",
"0.77700764",
"0.7733867",
"0.7707605",
"0.7705628",
"0.767445",
"0.7638304",
"0.761557",
"0.7609519",
"0.75994295",
"0.75710046",
"0.7551219",
"0.75173587",
"0.7508863",
"0.7503406",
"0.748901",
"0.74517256",
"0.7450225",
"0.7446922",
"0.744494",
"0.7429376",
"0.74230814",
"0.74074453",
"0.74003863",
"0.7398386",
"0.7390212",
"0.73892885",
"0.73825586",
"0.73716486",
"0.7370101",
"0.73628134",
"0.73369366",
"0.73341596",
"0.7331751",
"0.7310098",
"0.7287144",
"0.72870094",
"0.7286204",
"0.72798294",
"0.7272765",
"0.7271641",
"0.7267257",
"0.72564673",
"0.72539014",
"0.7246979",
"0.7244459",
"0.7234195",
"0.72237426",
"0.721257",
"0.7207516",
"0.7207079",
"0.7200597",
"0.72001326",
"0.72000664",
"0.7199652",
"0.7177761",
"0.7177096",
"0.7175992",
"0.7166617",
"0.7164269",
"0.715199",
"0.7151739",
"0.71500677",
"0.7148743",
"0.7116756",
"0.71127623",
"0.71101093",
"0.70921594",
"0.709067",
"0.708972",
"0.70843583",
"0.7082843",
"0.7081238",
"0.7077933",
"0.7073626",
"0.7065453",
"0.7064809",
"0.705971",
"0.70541424",
"0.70537984",
"0.704467",
"0.7039384",
"0.70391405",
"0.70375884",
"0.7033067",
"0.7032742",
"0.702964",
"0.7005698"
] | 0.7650135 | 18 |
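All the `*_cast` documents and negatives above are variants of one SWIG-generated pattern: a free function `itkXxx_cast(obj)` and an equivalent static `Xxx.cast(obj)` method that down-cast a generic `itkLightObject` to a concrete wrapped type. A hedged usage sketch, assuming a WrapITK build that wraps `TernaryAddImageFilter` for 2-D double images:

```python
import itk

ImageType = itk.Image[itk.D, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

filt = FilterType.New()

# Pretend a pipeline introspection call handed the filter back with its
# static type erased; the generated cast() recovers the concrete wrapper.
light_object = filt
recovered = FilterType.cast(light_object)
assert recovered is not None
```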
New() -> itkTernaryAddImageFilterID2ID2ID2ID2_Superclass Create a new object of the class itkTernaryAddImageFilterID2ID2ID2ID2_Superclass and set the inputs and parameters if any named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method of the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.762808",
"0.75490963",
"0.74511015",
"0.7423488",
"0.7417358",
"0.73689806",
"0.7338099",
"0.72890794",
"0.7233506",
"0.72322845",
"0.71680635",
"0.7162203",
"0.7157877",
"0.7050944",
"0.7018189",
"0.7015261",
"0.701475",
"0.7012063",
"0.69447744",
"0.69441223",
"0.6889131",
"0.6881423",
"0.6868967",
"0.6853723",
"0.68408406",
"0.6826385",
"0.6813031",
"0.68121004",
"0.68096787",
"0.6790982",
"0.67853516",
"0.6779387",
"0.67491055",
"0.67461556",
"0.67240286",
"0.66872597",
"0.66388506",
"0.6636771",
"0.6580478",
"0.65689754",
"0.6560821",
"0.65537447",
"0.65536624",
"0.65393394",
"0.6538611",
"0.6536759",
"0.6529571",
"0.6528005",
"0.6503665",
"0.6499846",
"0.64975405",
"0.64933676",
"0.6492796",
"0.64857596",
"0.6459219",
"0.64535993",
"0.64484006",
"0.6439381",
"0.64381754",
"0.6438147",
"0.6435963",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077",
"0.64312077"
] | 0.7865722 | 0 |
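The docstring in the entry above spells out the WrapITK `New()` convention: positional arguments are assigned to the filter's inputs in order, and each keyword argument `Foo=...` is forwarded to the corresponding `SetFoo(...)` call. A minimal sketch of both spellings, under the same hypothetical WrapITK build as above; `img1`–`img3` are placeholders for already-created 2-D double images:

```python
import itk

ImageType = itk.Image[itk.D, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

img1 = img2 = img3 = ImageType.New()  # placeholders for real input images

# Non-named parameters map to the inputs in order: img1 -> first input, etc.
add = FilterType.New(img1, img2, img3)

# Named parameters are routed to the matching Set* methods, per the docstring:
add = FilterType.New(Input1=img1, Input2=img2, Input3=img3)
```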
itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterID2ID2ID2ID2_Superclass | def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)"
] | [
"0.84598845",
"0.830569",
"0.83006763",
"0.829275",
"0.82689846",
"0.82017225",
"0.8179489",
"0.8175326",
"0.81749785",
"0.8088286",
"0.80781823",
"0.8049702",
"0.8036825",
"0.802763",
"0.80256134",
"0.8021667",
"0.7969493",
"0.7960738",
"0.79595184",
"0.7879262",
"0.78750664",
"0.7869362",
"0.76886404",
"0.7679488",
"0.7677076",
"0.76714647",
"0.76687956",
"0.7627246",
"0.762539",
"0.7585055",
"0.75762296",
"0.7574734",
"0.75713134",
"0.7549467",
"0.75304633",
"0.75136065",
"0.7512272",
"0.75098",
"0.75041366",
"0.7495222",
"0.74910885",
"0.74497557",
"0.74333704",
"0.74258727",
"0.7423837",
"0.7423205",
"0.7420782",
"0.7418653",
"0.7403477",
"0.73920894",
"0.73895943",
"0.73611885",
"0.73390347",
"0.7328029",
"0.7323098",
"0.7294147",
"0.7284964",
"0.7275436",
"0.72340524",
"0.7231759",
"0.72268194",
"0.72251093",
"0.7216264",
"0.72091216",
"0.7207557",
"0.71966505",
"0.7174245",
"0.71739966",
"0.7163794",
"0.7144445",
"0.7122528",
"0.7120315",
"0.7073085",
"0.70382255",
"0.7036256",
"0.7034319",
"0.70220286",
"0.698322",
"0.69581205",
"0.69514775",
"0.6932811",
"0.69245905",
"0.69142497",
"0.691283",
"0.68960255",
"0.68937165",
"0.6888677",
"0.68741286",
"0.6873151",
"0.6856007",
"0.6849347",
"0.68479264",
"0.68388623",
"0.6812614",
"0.6808472",
"0.67980886",
"0.6794937",
"0.6786653",
"0.67839277",
"0.67699504"
] | 0.83009154 | 2 |
cast(itkLightObject obj) -> itkTernaryAddImageFilterID3ID3ID3ID3_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)"
] | [
"0.8447806",
"0.835408",
"0.8331328",
"0.83247644",
"0.8303352",
"0.81888425",
"0.81718343",
"0.8150934",
"0.8132018",
"0.8111063",
"0.80385387",
"0.8013818",
"0.79792774",
"0.796343",
"0.79049927",
"0.7870615",
"0.7870506",
"0.7843389",
"0.780859",
"0.77116466",
"0.76478946",
"0.76178384",
"0.7598225",
"0.7587527",
"0.75734526",
"0.7566176",
"0.7542837",
"0.7493631",
"0.74737597",
"0.7469733",
"0.7455996",
"0.74467206",
"0.7430233",
"0.74274063",
"0.74266034",
"0.7412569",
"0.74021363",
"0.7399048",
"0.7380209",
"0.73737335",
"0.73733675",
"0.73667276",
"0.73652416",
"0.7363726",
"0.7359667",
"0.7359265",
"0.73520386",
"0.73479944",
"0.73465437",
"0.7328952",
"0.7324021",
"0.731517",
"0.7314092",
"0.73100054",
"0.7301942",
"0.7298538",
"0.7280126",
"0.72695005",
"0.7268499",
"0.72583944",
"0.72413576",
"0.7234132",
"0.7233672",
"0.72300357",
"0.7227969",
"0.7223953",
"0.7221498",
"0.7221337",
"0.71974814",
"0.7182638",
"0.7178013",
"0.71732014",
"0.71663",
"0.7161472",
"0.71544564",
"0.71535254",
"0.714953",
"0.71460277",
"0.71414506",
"0.7136289",
"0.71299356",
"0.71243024",
"0.7120319",
"0.71194285",
"0.71185625",
"0.7117197",
"0.7112681",
"0.71012104",
"0.7098324",
"0.7089492",
"0.70860255",
"0.7082413",
"0.7080654",
"0.70798635",
"0.70789695",
"0.7066611",
"0.7065165",
"0.7056379",
"0.7054453",
"0.7054189"
] | 0.75060236 | 27 |
New() -> itkTernaryAddImageFilterID3ID3ID3ID3_Superclass Create a new object of the class itkTernaryAddImageFilterID3ID3ID3ID3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID3ID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.77666956",
"0.76079875",
"0.75039846",
"0.75035",
"0.7479952",
"0.74662477",
"0.7298497",
"0.72855514",
"0.72777826",
"0.72717106",
"0.72706074",
"0.7184689",
"0.7160855",
"0.71421075",
"0.71154994",
"0.7073263",
"0.7060131",
"0.70412266",
"0.7006315",
"0.6987071",
"0.6958632",
"0.6957956",
"0.6952372",
"0.69433296",
"0.6930342",
"0.6929591",
"0.6909524",
"0.68802714",
"0.68798286",
"0.68414164",
"0.6833543",
"0.6829974",
"0.67244756",
"0.67212075",
"0.6683199",
"0.6672721",
"0.6663152",
"0.6661161",
"0.6646651",
"0.6607785",
"0.65967184",
"0.65950596",
"0.65939146",
"0.6583086",
"0.6579946",
"0.65575945",
"0.65560687",
"0.65491426",
"0.65424585",
"0.65285105",
"0.65231603",
"0.6519719",
"0.6519004",
"0.65118635",
"0.65087384",
"0.6508564",
"0.64990246",
"0.6487123",
"0.6482788",
"0.6474858",
"0.64729327",
"0.64726603",
"0.6471554",
"0.6470353",
"0.646242",
"0.6460919",
"0.64606005",
"0.6454159",
"0.6452408",
"0.64323795",
"0.6425929",
"0.6425477",
"0.64132774",
"0.6408568",
"0.6402476",
"0.63816184",
"0.63814765",
"0.63763165",
"0.6374971",
"0.6371901",
"0.6367627",
"0.6363571",
"0.63607806",
"0.63539404",
"0.63480186",
"0.63254166",
"0.6324686",
"0.6323199",
"0.63223004",
"0.6317195",
"0.6312495",
"0.6311391",
"0.63084584",
"0.63074845",
"0.6303715",
"0.62951386",
"0.62935287",
"0.6287701",
"0.6286513",
"0.62839484"
] | 0.7919466 | 0 |
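The docstring in the record above describes ITK's generated factory convention: `itkTemplate.New()` wires positional arguments to the filter's inputs in order and routes each keyword argument `Foo=bar` to the corresponding `SetFoo(bar)` call, then deduces the template instantiation from the inputs. A minimal sketch of that convention, assuming an ITK Python build where `TernaryAddImageFilter` is wrapped for 3-D float images (the `IF3IF3IF3IF3` variant listed among the negatives); the array names and 4×4×4 shapes are illustrative only:

```python
import itk
import numpy as np

# Three small 3-D float volumes to add voxel-wise.
a = itk.image_from_array(np.ones((4, 4, 4), dtype=np.float32))
b = itk.image_from_array(np.full((4, 4, 4), 2.0, dtype=np.float32))
c = itk.image_from_array(np.full((4, 4, 4), 3.0, dtype=np.float32))

# Positional arguments are assigned to inputs 1..3 in order, as the
# docstring says; a keyword argument Foo=bar would go to SetFoo(bar).
add = itk.TernaryAddImageFilter.New(a, b, c)
add.Update()

out = itk.array_from_image(add.GetOutput())
assert float(out[0, 0, 0]) == 6.0  # 1 + 2 + 3, voxel-wise
```

Calling `New()` on the untemplated `itk.TernaryAddImageFilter` works here because the dispatcher can infer the template parameters from the first image argument; the fully explicit form would instantiate `itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]` first.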
itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterID3ID3ID3ID3_Superclass | def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)"
] | [
"0.8736071",
"0.868161",
"0.85678107",
"0.855728",
"0.8525033",
"0.84827745",
"0.8458611",
"0.84521455",
"0.8401546",
"0.8398676",
"0.82971007",
"0.82100916",
"0.819524",
"0.8174809",
"0.8162183",
"0.8155681",
"0.8120304",
"0.8097459",
"0.80840564",
"0.8081543",
"0.8037756",
"0.7939386",
"0.79020196",
"0.78987044",
"0.7884879",
"0.78641385",
"0.7858922",
"0.78411174",
"0.78248566",
"0.7823652",
"0.7803847",
"0.7780468",
"0.7767989",
"0.7761252",
"0.77409524",
"0.7721213",
"0.7720115",
"0.7717677",
"0.7635514",
"0.7630841",
"0.7618568",
"0.75953466",
"0.75920737",
"0.75535905",
"0.75520873",
"0.7548576",
"0.75409156",
"0.75065213",
"0.75031435",
"0.7499483",
"0.748945",
"0.7456242",
"0.74427474",
"0.74206877",
"0.74024487",
"0.7336944",
"0.73290193",
"0.7307567",
"0.7288266",
"0.72713226",
"0.7258725",
"0.72460216",
"0.7239958",
"0.72357816",
"0.72345847",
"0.72337484",
"0.7232972",
"0.72202903",
"0.72096854",
"0.7169896",
"0.7158978",
"0.71587723",
"0.71380216",
"0.7135994",
"0.7123875",
"0.7123824",
"0.7096071",
"0.7079615",
"0.70780396",
"0.70359343",
"0.70288277",
"0.7025094",
"0.7013637",
"0.70129675",
"0.69715846",
"0.6963969",
"0.69328815",
"0.69185275",
"0.69165885",
"0.6908514",
"0.69061625",
"0.69043666",
"0.686409",
"0.68574125",
"0.6831297",
"0.68264306",
"0.6821603",
"0.6819662",
"0.6813479",
"0.68114907"
] | 0.8572596 | 2 |
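The `*_cast` helpers in the record above are the SWIG-level down-casts: they re-type a generic `itkLightObject` handle as the concrete templated class, which matters when an API hands back only a base-class pointer. The same function is also exposed as the static `cast()` method shown throughout these records. A minimal sketch, assuming the same wrapped build as before; treating `f` itself as the generic handle is a stand-in for a real base-typed pointer:

```python
import itk

ImageType = itk.Image[itk.F, 3]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType,
                                       ImageType, ImageType]

f = FilterType.New()

# In real code `generic` would arrive typed only as a base class, e.g. an
# itk.Object handed to an observer callback or kept in a mixed container.
generic = f

# Re-type the handle as the concrete templated class to regain its API.
concrete = FilterType.cast(generic)
print(type(concrete).__name__)  # e.g. itkTernaryAddImageFilterIF3IF3IF3IF3
```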
cast(itkLightObject obj) -> itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)"
] | [
"0.83766204",
"0.8343197",
"0.81994504",
"0.8033622",
"0.8021271",
"0.7993956",
"0.79668874",
"0.7955408",
"0.79487544",
"0.7893651",
"0.78932303",
"0.7893183",
"0.7889645",
"0.78552145",
"0.7831324",
"0.7804832",
"0.7798998",
"0.77956307",
"0.7756702",
"0.7714334",
"0.7674037",
"0.7651036",
"0.76196563",
"0.76091486",
"0.7576926",
"0.75757253",
"0.75647247",
"0.7550381",
"0.7543138",
"0.75225025",
"0.7508518",
"0.75076413",
"0.7499027",
"0.7495133",
"0.7489453",
"0.7482838",
"0.74786454",
"0.7467435",
"0.7459076",
"0.7446038",
"0.7427453",
"0.74170524",
"0.7416157",
"0.74082094",
"0.74037164",
"0.7392436",
"0.73903507",
"0.73756236",
"0.73492676",
"0.73386145",
"0.7337928",
"0.7337066",
"0.7317239",
"0.7316006",
"0.73115736",
"0.72988725",
"0.7281022",
"0.726597",
"0.72654366",
"0.7259183",
"0.7252371",
"0.724912",
"0.7248577",
"0.72278595",
"0.7227212",
"0.72188133",
"0.72137624",
"0.7208157",
"0.7203426",
"0.7194396",
"0.71931463",
"0.718985",
"0.71770775",
"0.7172459",
"0.71697986",
"0.7151004",
"0.715008",
"0.714952",
"0.7144803",
"0.71342355",
"0.71337783",
"0.7131575",
"0.71243775",
"0.7121177",
"0.71183425",
"0.71183145",
"0.710852",
"0.7106006",
"0.7084644",
"0.7073208",
"0.70716023",
"0.7068438",
"0.70664746",
"0.7043522",
"0.7042838",
"0.7040648",
"0.70364106",
"0.70349604",
"0.7034859",
"0.7030452"
] | 0.787697 | 13 |
New() -> itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass Create a new object of the class itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF2IF2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.78903687",
"0.7618442",
"0.7587125",
"0.7554806",
"0.7497415",
"0.7490903",
"0.7460643",
"0.7349206",
"0.73199266",
"0.7273647",
"0.7265122",
"0.7225199",
"0.72199607",
"0.7203722",
"0.7196587",
"0.71783614",
"0.71586716",
"0.7110979",
"0.7084644",
"0.70412594",
"0.70065254",
"0.69687927",
"0.69413334",
"0.69385123",
"0.69342095",
"0.692517",
"0.6903904",
"0.69023705",
"0.68812555",
"0.68713856",
"0.6863494",
"0.68611187",
"0.6853313",
"0.68433493",
"0.6824195",
"0.68139464",
"0.68010247",
"0.67541444",
"0.6734946",
"0.6728645",
"0.67284906",
"0.6726305",
"0.67191046",
"0.66672164",
"0.6661116",
"0.66572917",
"0.66561437",
"0.6653537",
"0.66506773",
"0.6649708",
"0.6639779",
"0.66283834",
"0.65790075",
"0.6576881",
"0.657178",
"0.6567623",
"0.65589005",
"0.6558234",
"0.6555975",
"0.65524876",
"0.65489185",
"0.6547198",
"0.6545457",
"0.6543456",
"0.654121",
"0.65259117",
"0.65181404",
"0.65156275",
"0.6514442",
"0.6503828",
"0.6491043",
"0.6489731",
"0.6485087",
"0.64825594",
"0.64815575",
"0.6479638",
"0.6478717",
"0.64740103",
"0.64626193",
"0.6460863",
"0.64600575",
"0.6459985",
"0.6457657",
"0.6455343",
"0.6454468",
"0.64522994",
"0.6451989",
"0.64420754",
"0.6430841",
"0.642578",
"0.64220065",
"0.64174706",
"0.6409563",
"0.63979703",
"0.63931143",
"0.6392817",
"0.6392551",
"0.6383436",
"0.63759077",
"0.6372885"
] | 0.8061528 | 0 |
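
The New() docstring in the row above describes the generic factory convention of the ITK Python wrapping. The following is a minimal usage sketch, assuming an ITK build in which TernaryAddImageFilter is wrapped for 2-D float images (the IF2IF2IF2IF2 instantiation above); the names ImageType, FilterType, img1..img3, and adder are illustrative and not part of the original data:

import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

# Inputs created for illustration only.
img1 = ImageType.New()
img2 = ImageType.New()
img3 = ImageType.New()

# Non-named arguments are assigned, in order, to the filter's inputs;
# a named argument Foo=x would be forwarded to the SetFoo(x) method,
# following the docstring's "prefixed by 'Set'" rule.
adder = FilterType.New(img1, img2, img3)
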
itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass | def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)"
] | [
"0.88076603",
"0.8572629",
"0.85263824",
"0.84869593",
"0.84707963",
"0.8447603",
"0.8407247",
"0.83666754",
"0.8311725",
"0.82673705",
"0.82649153",
"0.8258107",
"0.8230317",
"0.82138693",
"0.82006955",
"0.8125339",
"0.8115553",
"0.80756104",
"0.8072043",
"0.8067883",
"0.8057454",
"0.805316",
"0.8037226",
"0.79890996",
"0.7967445",
"0.79483676",
"0.7922309",
"0.7908108",
"0.7870035",
"0.78291464",
"0.7819505",
"0.7785192",
"0.7749189",
"0.7746251",
"0.7728861",
"0.77254635",
"0.7715707",
"0.7693082",
"0.7690588",
"0.76308084",
"0.7596186",
"0.7589676",
"0.75864965",
"0.75859463",
"0.7577603",
"0.7571898",
"0.7555655",
"0.7553492",
"0.75244045",
"0.7516835",
"0.7513383",
"0.7511888",
"0.75116086",
"0.74968714",
"0.7491872",
"0.74870914",
"0.7470444",
"0.74693555",
"0.74597484",
"0.7441578",
"0.742601",
"0.74247336",
"0.7411488",
"0.7411022",
"0.7362208",
"0.73460996",
"0.73452264",
"0.73298264",
"0.7326668",
"0.73045635",
"0.72703326",
"0.72675693",
"0.7258289",
"0.72460383",
"0.7238294",
"0.72379625",
"0.7228665",
"0.72019255",
"0.71988547",
"0.71970266",
"0.7186006",
"0.7183771",
"0.7171419",
"0.71684283",
"0.71438414",
"0.71410245",
"0.71377844",
"0.7121939",
"0.71102005",
"0.709148",
"0.708611",
"0.70833606",
"0.70775884",
"0.7061119",
"0.7046836",
"0.7035861",
"0.7035842",
"0.7024578",
"0.69873375",
"0.6983074"
] | 0.8668573 | 1 |
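
The cast() signature in the row above is the SWIG-generated downcast helper. Below is a minimal sketch of how such a helper is typically used, assuming the same 2-D float instantiation is available in the ITK build; the names ImageType, FilterType, flt, and concrete are illustrative:

import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

flt = FilterType.New()

# cast() wraps a C++ dynamic_cast from an itkLightObject pointer back to
# the concrete wrapped type, so an object held through a generic base
# reference can be used with its full interface again.
concrete = FilterType.cast(flt)
print(type(concrete).__name__)
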
cast(itkLightObject obj) -> itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)"
] | [
"0.84215736",
"0.84034926",
"0.8305896",
"0.81551635",
"0.81245995",
"0.8102471",
"0.80973864",
"0.80775636",
"0.80774724",
"0.8057766",
"0.8046051",
"0.8026812",
"0.80027497",
"0.78837883",
"0.7882408",
"0.78676045",
"0.7847785",
"0.78235435",
"0.77957475",
"0.774796",
"0.77218664",
"0.7695079",
"0.7686479",
"0.7646834",
"0.76299185",
"0.76203686",
"0.7597839",
"0.75977737",
"0.7590967",
"0.7582078",
"0.75569797",
"0.7540848",
"0.75305927",
"0.7528407",
"0.75265586",
"0.7513346",
"0.75113183",
"0.7502275",
"0.7497328",
"0.7493329",
"0.7492843",
"0.74876046",
"0.7478193",
"0.7478017",
"0.7459185",
"0.7444435",
"0.7444201",
"0.7440204",
"0.74328935",
"0.743068",
"0.7419224",
"0.7412324",
"0.73951924",
"0.73940444",
"0.7389329",
"0.73862785",
"0.73857164",
"0.7375331",
"0.73741114",
"0.736125",
"0.7361055",
"0.73556334",
"0.7353102",
"0.7351639",
"0.73437995",
"0.73376906",
"0.73289716",
"0.7328005",
"0.7325443",
"0.7310195",
"0.7306132",
"0.7296023",
"0.72921824",
"0.7283906",
"0.72829074",
"0.7278229",
"0.7277316",
"0.7260219",
"0.7255728",
"0.72500813",
"0.72486234",
"0.72469074",
"0.723845",
"0.7235679",
"0.72203404",
"0.7208216",
"0.71909446",
"0.7172402",
"0.7168694",
"0.7165956",
"0.716331",
"0.7161246",
"0.7159644",
"0.7151383",
"0.7140246",
"0.7133995",
"0.71278405",
"0.7121961",
"0.71203524",
"0.71147823"
] | 0.77168095 | 21 |
New() -> itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass Create a new object of the class itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID3ID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF3IF3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.78577113",
"0.7592699",
"0.7584402",
"0.74809074",
"0.7461834",
"0.74548477",
"0.7382315",
"0.7356721",
"0.7290633",
"0.72715217",
"0.7263018",
"0.7174303",
"0.7171283",
"0.7165325",
"0.7131815",
"0.7109765",
"0.7042447",
"0.7011581",
"0.69516927",
"0.69480693",
"0.6946724",
"0.6933001",
"0.6930969",
"0.6927596",
"0.691683",
"0.6914075",
"0.68908465",
"0.68890315",
"0.6888107",
"0.6853245",
"0.68419653",
"0.6838311",
"0.68382186",
"0.68248415",
"0.6816648",
"0.6811242",
"0.6751306",
"0.67391014",
"0.6721149",
"0.67128146",
"0.67102396",
"0.67090833",
"0.668938",
"0.6685186",
"0.66692406",
"0.6661329",
"0.6659841",
"0.6649977",
"0.66408294",
"0.66200614",
"0.6615238",
"0.6604896",
"0.6603292",
"0.660277",
"0.66026425",
"0.65990096",
"0.6592166",
"0.65805036",
"0.65392613",
"0.6537326",
"0.6525912",
"0.65248984",
"0.6523544",
"0.6519565",
"0.64962494",
"0.6490978",
"0.64901876",
"0.6483086",
"0.6475579",
"0.6473966",
"0.64737743",
"0.6471888",
"0.64681864",
"0.6466159",
"0.6447577",
"0.644688",
"0.64448565",
"0.6440349",
"0.6438833",
"0.643248",
"0.6432285",
"0.6429459",
"0.64294416",
"0.64239717",
"0.64223486",
"0.642074",
"0.6417441",
"0.6395638",
"0.6388201",
"0.6383215",
"0.63575965",
"0.6356744",
"0.63420427",
"0.6338099",
"0.63281405",
"0.63259786",
"0.63005847",
"0.62936926",
"0.62901425",
"0.6284717"
] | 0.7961178 | 0 |
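A minimal usage sketch of the `New()` calling convention documented in the query above. It assumes the `itk` Python package with `TernaryAddImageFilter` wrapped for float 3-D images; the array contents and variable names are purely illustrative, not part of the dataset row.

```python
# Sketch of the New() convention: positional args go to the inputs in order,
# keyword args are dispatched to 'Set' + <name>. Assumes itk with the
# IF3IF3IF3IF3 instantiation wrapped; data is illustrative.
import itk
import numpy as np

arr = np.ones((4, 4, 4), dtype=np.float32)
img1 = itk.GetImageFromArray(arr)        # itk.Image[itk.F, 3]
img2 = itk.GetImageFromArray(arr * 2.0)
img3 = itk.GetImageFromArray(arr * 3.0)

ImageType = itk.Image[itk.F, 3]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

# Non-named arguments are assigned to the inputs in order:
# img1 to the first input, img2 to the second, img3 to the third.
adder = FilterType.New(img1, img2, img3)

# Equivalently, a named argument calls the method of the same name prefixed
# by 'Set'; Input1=img1 below is the same as adder.SetInput1(img1).
adder = FilterType.New(Input1=img1, Input2=img2, Input3=img3)

adder.Update()
out = adder.GetOutput()  # voxel-wise sum: every voxel equals 6.0 here
```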
itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)"
] | [
"0.889163",
"0.872401",
"0.8716521",
"0.8686031",
"0.86727256",
"0.8645202",
"0.86261636",
"0.85349524",
"0.8480945",
"0.84491646",
"0.8429972",
"0.8390216",
"0.8305966",
"0.82382905",
"0.82324976",
"0.81321186",
"0.81260115",
"0.8123955",
"0.80859166",
"0.80845314",
"0.8056589",
"0.8052327",
"0.804861",
"0.80232453",
"0.79935324",
"0.7979531",
"0.7918457",
"0.790081",
"0.7882919",
"0.78771764",
"0.78757423",
"0.78687286",
"0.7834224",
"0.7821914",
"0.77973664",
"0.7780019",
"0.7761909",
"0.7758668",
"0.7742168",
"0.7733768",
"0.7720162",
"0.7705115",
"0.7698066",
"0.7686954",
"0.76733816",
"0.76555705",
"0.7591251",
"0.75884044",
"0.7584062",
"0.75665855",
"0.7523",
"0.75051546",
"0.75034946",
"0.7491759",
"0.74739",
"0.7462549",
"0.7452624",
"0.74195",
"0.73998076",
"0.738923",
"0.7374469",
"0.7348272",
"0.73204833",
"0.73116475",
"0.7305259",
"0.7301896",
"0.7295791",
"0.7295609",
"0.7280351",
"0.7255241",
"0.725381",
"0.7238254",
"0.72294486",
"0.72262496",
"0.7225047",
"0.7212472",
"0.719597",
"0.7194971",
"0.71829104",
"0.71703154",
"0.7152478",
"0.7105492",
"0.7104622",
"0.70941675",
"0.7092794",
"0.7086375",
"0.70840293",
"0.70808566",
"0.70751953",
"0.70696557",
"0.70627904",
"0.70584375",
"0.7039907",
"0.70066583",
"0.700515",
"0.6985799",
"0.69725996",
"0.6970351",
"0.6967105",
"0.6965134"
] | 0.87746197 | 1 |
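The `*_cast` helpers in these rows wrap SWIG's dynamic down-cast: given a generic `itkLightObject` handle, they return the same object re-typed as the concrete (or Superclass) wrapper. A minimal sketch follows, assuming the IUC2 instantiation is wrapped; how a failed cast is reported is wrapper-specific, so only the success path is shown.

```python
# Sketch of the *_cast pattern. Assumes itk with the IUC2IUC2IUC2IUC2
# instantiation of TernaryAddImageFilter wrapped; names are illustrative.
import itk

ImageType = itk.Image[itk.UC, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

flt = FilterType.New()

# Pretend the object comes back through a base-class handle, e.g. from a
# container that only stores itk.LightObject references:
base_handle = flt  # illustrative; a real handle may have lost its type

# cast() recovers the fully typed wrapper so the filter API is usable again.
typed = FilterType.cast(base_handle)
typed.SetInput1(ImageType.New())  # type-specific method available after cast
```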
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)"
] | [
"0.834111",
"0.8290173",
"0.81825304",
"0.81166714",
"0.8085342",
"0.8076864",
"0.8007342",
"0.7972806",
"0.7964573",
"0.7946949",
"0.7939894",
"0.7933654",
"0.78695387",
"0.7853671",
"0.7838806",
"0.78119445",
"0.7795307",
"0.7725836",
"0.77124554",
"0.77036095",
"0.77026665",
"0.7653346",
"0.76478887",
"0.76389384",
"0.76101786",
"0.7576824",
"0.7572978",
"0.75709367",
"0.7551426",
"0.7546779",
"0.7546012",
"0.75395465",
"0.7533605",
"0.7502286",
"0.74788135",
"0.7477353",
"0.7465633",
"0.746091",
"0.74560803",
"0.74558574",
"0.74547344",
"0.74346876",
"0.74319875",
"0.7428785",
"0.74274915",
"0.7421879",
"0.7411972",
"0.74076235",
"0.7402408",
"0.74016833",
"0.7401615",
"0.7401332",
"0.73977995",
"0.7393169",
"0.7389296",
"0.7388594",
"0.7384186",
"0.73753196",
"0.73673296",
"0.73615056",
"0.7356254",
"0.73453593",
"0.7341921",
"0.732828",
"0.73269176",
"0.73265654",
"0.7319608",
"0.7306974",
"0.7305013",
"0.73013943",
"0.72987705",
"0.7298136",
"0.72969574",
"0.7296741",
"0.72965854",
"0.7295675",
"0.72936666",
"0.7289284",
"0.72890216",
"0.72853255",
"0.7283301",
"0.728253",
"0.72806174",
"0.7277977",
"0.7271643",
"0.72607327",
"0.7253232",
"0.72519267",
"0.72497636",
"0.72495455",
"0.7242577",
"0.72417074",
"0.7239114",
"0.7229861",
"0.7229183",
"0.7224229",
"0.7223503",
"0.7217481",
"0.7216864",
"0.7213487"
] | 0.7777628 | 17 |
New() -> itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass Create a new object of the class itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, the second to the second input, and so on. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
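A minimal, self-contained sketch of the argument-dispatch convention the query above describes; FakeFilter and new() are hypothetical stand-ins for illustration, not ITK's actual itkTemplate.New implementation:

class FakeFilter:
    """Hypothetical stand-in for a wrapped ITK filter."""
    def __init__(self):
        self.inputs = {}
        self.radius = None

    def SetInput(self, index, value):
        self.inputs[index] = value

    def SetRadius(self, value):
        self.radius = value

def new(obj, *args, **kwargs):
    # Non-named parameters become inputs in order: the first goes to
    # input 0, the second to input 1, and so on.
    for index, value in enumerate(args):
        obj.SetInput(index, value)
    # Named parameters call the method of the same name prefixed by 'Set'.
    for name, value in kwargs.items():
        getattr(obj, "Set" + name)(value)
    return obj

f = new(FakeFilter(), "imageA", "imageB", Radius=2)
assert f.inputs == {0: "imageA", 1: "imageB"}
assert f.radius == 2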
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def __init__(self, *args):\n _itkRGBAPixelPython.itkRGBAPixelUC_swiginit(self,_itkRGBAPixelPython.new_itkRGBAPixelUC(*args))",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def __init__(self, *args):\n _itkRGBAPixelPython.itkRGBAPixelUS_swiginit(self,_itkRGBAPixelPython.new_itkRGBAPixelUS(*args))",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.77771425",
"0.777552",
"0.7686807",
"0.7643416",
"0.7587692",
"0.75774217",
"0.73941374",
"0.73560876",
"0.73111916",
"0.7292542",
"0.72911847",
"0.72615075",
"0.7257298",
"0.72467935",
"0.71607935",
"0.71508795",
"0.7145032",
"0.71084183",
"0.7099382",
"0.70673823",
"0.7050572",
"0.7045471",
"0.7041566",
"0.70305765",
"0.70130855",
"0.6998498",
"0.6995835",
"0.6992183",
"0.6981598",
"0.6981034",
"0.6961082",
"0.69393855",
"0.6918511",
"0.6881062",
"0.686167",
"0.6845795",
"0.683925",
"0.68319595",
"0.682794",
"0.6820989",
"0.6800176",
"0.6797705",
"0.676686",
"0.67477083",
"0.6734028",
"0.6731201",
"0.67295253",
"0.6724561",
"0.6715601",
"0.6712391",
"0.6700528",
"0.6699888",
"0.66781706",
"0.6667914",
"0.6663648",
"0.6655373",
"0.6651471",
"0.6642366",
"0.6637028",
"0.6634593",
"0.6626621",
"0.6609832",
"0.6605605",
"0.6591239",
"0.657454",
"0.65732557",
"0.657125",
"0.6570826",
"0.65659165",
"0.65542907",
"0.65373963",
"0.65346116",
"0.6532553",
"0.652629",
"0.6523616",
"0.65092003",
"0.65056884",
"0.6504626",
"0.65039176",
"0.6502405",
"0.64995146",
"0.6497969",
"0.6494798",
"0.6488347",
"0.6485823",
"0.6474255",
"0.6472626",
"0.6472368",
"0.6467774",
"0.6458366",
"0.6446817",
"0.64294255",
"0.64275557",
"0.6419447",
"0.63963556",
"0.6393314",
"0.63909906",
"0.63810134",
"0.63586193",
"0.63515174"
] | 0.79703885 | 0 |
itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass | def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args) | {
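The flattened _Superclass_cast functions perform a checked down-cast from a generic itkLightObject to the concrete wrapped base type. A hedged usage sketch, assuming ITK's generated low-level module is importable under the flat name used in this record (packaging differs across ITK builds, so the module and class names here are assumptions):

import itkTernaryAddImageFilterPython as m

# Instantiate the concrete wrapped filter the same way the record's New()
# does, via the SWIG-exposed __New_orig__ factory.
filt = m.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()

# Down-cast the object, held as a generic itkLightObject reference, back
# to its superclass wrapper type.
base = m.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(filt)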
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)"
] | [
"0.8583774",
"0.8431196",
"0.8420218",
"0.83839583",
"0.8363702",
"0.83492815",
"0.82900065",
"0.82717526",
"0.82178116",
"0.8217027",
"0.8207864",
"0.820718",
"0.8156194",
"0.80921507",
"0.8092105",
"0.8075868",
"0.8073345",
"0.8053059",
"0.8038925",
"0.8010338",
"0.79771775",
"0.7878296",
"0.7858739",
"0.78540045",
"0.78158575",
"0.7761325",
"0.77299756",
"0.77172834",
"0.76812",
"0.76592106",
"0.75847954",
"0.7580109",
"0.7571314",
"0.75418264",
"0.7536644",
"0.75109154",
"0.749912",
"0.749236",
"0.74798036",
"0.74708974",
"0.7468846",
"0.74602497",
"0.74584424",
"0.7455958",
"0.74511933",
"0.7449512",
"0.74456567",
"0.74432695",
"0.743407",
"0.7425522",
"0.74242365",
"0.73952514",
"0.7366331",
"0.7365958",
"0.7357395",
"0.73482466",
"0.73389363",
"0.73130816",
"0.7302516",
"0.7301498",
"0.7295768",
"0.7294007",
"0.72891927",
"0.72877485",
"0.72386044",
"0.7199203",
"0.71895283",
"0.7170882",
"0.71592784",
"0.71581334",
"0.71560746",
"0.7154176",
"0.71461886",
"0.7137874",
"0.713362",
"0.7131443",
"0.7118752",
"0.71155643",
"0.70710367",
"0.70277524",
"0.70198137",
"0.70130587",
"0.69998825",
"0.6984251",
"0.69700587",
"0.69698256",
"0.6952719",
"0.69509596",
"0.6929945",
"0.6928639",
"0.69157684",
"0.6907163",
"0.69067425",
"0.6905051",
"0.6902837",
"0.6899569",
"0.689722",
"0.6896464",
"0.68897516",
"0.6885952"
] | 0.85070795 | 1 |
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)"
] | [
"0.8382613",
"0.8278823",
"0.8251991",
"0.82148737",
"0.8203227",
"0.8143775",
"0.81393623",
"0.8138548",
"0.81275445",
"0.80943424",
"0.8089351",
"0.7965766",
"0.79550606",
"0.79431623",
"0.79347086",
"0.791383",
"0.7902391",
"0.7832134",
"0.7724162",
"0.7658042",
"0.76455724",
"0.7633209",
"0.7627889",
"0.76273125",
"0.76250964",
"0.7577801",
"0.75771296",
"0.7521578",
"0.7498902",
"0.74934477",
"0.74885863",
"0.74822885",
"0.7478715",
"0.7467593",
"0.74542385",
"0.7435685",
"0.7434372",
"0.74264467",
"0.74237823",
"0.7423653",
"0.74196225",
"0.7389833",
"0.7381608",
"0.7380905",
"0.7380743",
"0.73736787",
"0.7367684",
"0.7367559",
"0.7364324",
"0.73637176",
"0.7357858",
"0.7355718",
"0.7332468",
"0.73305357",
"0.73294926",
"0.7325157",
"0.7309981",
"0.73098576",
"0.7299921",
"0.72944677",
"0.7289744",
"0.72817916",
"0.727925",
"0.7265162",
"0.72650224",
"0.72632825",
"0.72506094",
"0.72480154",
"0.7241734",
"0.723745",
"0.7233245",
"0.72308564",
"0.7228834",
"0.7223907",
"0.72238",
"0.72210395",
"0.7213394",
"0.7210624",
"0.7206547",
"0.7205713",
"0.7204293",
"0.7194895",
"0.7190539",
"0.7188296",
"0.7185383",
"0.71846783",
"0.71820647",
"0.718151",
"0.7180686",
"0.7171531",
"0.71663475",
"0.71631825",
"0.7161991",
"0.71602625",
"0.7155098",
"0.7153062",
"0.71514505",
"0.7148433",
"0.71458143",
"0.71440524"
] | 0.756046 | 27 |
New() -> itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass Create a new object of the class itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign the non-named parameters to the inputs of the new object in order: the first non-named parameter to the first input, and so on. The named parameters are used by calling the method with the same name prefixed by 'Set'. (A usage sketch follows this record.) | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.78084457",
"0.7777113",
"0.7743919",
"0.7609945",
"0.75737077",
"0.7548231",
"0.73495024",
"0.7265904",
"0.7243554",
"0.7237153",
"0.7220631",
"0.7212902",
"0.72032726",
"0.7179373",
"0.7160813",
"0.7156109",
"0.7147796",
"0.71228814",
"0.71199644",
"0.70842755",
"0.70775044",
"0.7048789",
"0.7013777",
"0.70094776",
"0.69766766",
"0.6965889",
"0.69315624",
"0.6931002",
"0.68471116",
"0.6837332",
"0.68076605",
"0.68035424",
"0.6800819",
"0.67954487",
"0.6782856",
"0.6774538",
"0.674813",
"0.6742164",
"0.6732201",
"0.67066747",
"0.6705994",
"0.66978544",
"0.6684161",
"0.66723484",
"0.66718554",
"0.6663342",
"0.665686",
"0.6654328",
"0.66523963",
"0.66475666",
"0.66274256",
"0.66235983",
"0.66230357",
"0.6620882",
"0.65786964",
"0.6571336",
"0.656798",
"0.6565916",
"0.6560259",
"0.6540255",
"0.65345716",
"0.653434",
"0.65331507",
"0.6527986",
"0.6517651",
"0.6505436",
"0.6475651",
"0.64621073",
"0.64528024",
"0.64469236",
"0.6439845",
"0.64312154",
"0.6425973",
"0.6408796",
"0.6408071",
"0.64075774",
"0.6380759",
"0.638015",
"0.63776803",
"0.6373315",
"0.63732725",
"0.63725317",
"0.63724476",
"0.63649637",
"0.6363737",
"0.6363415",
"0.63590527",
"0.6356755",
"0.6351628",
"0.63378495",
"0.63283676",
"0.63249713",
"0.6324965",
"0.63244826",
"0.6319516",
"0.6290054",
"0.6282476",
"0.6271692",
"0.62662476",
"0.6258678"
] | 0.7868488 | 0 |
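The New() docstring in the record above describes ITK's generated factory semantics: positional arguments are assigned to the filter's inputs in order, and keyword arguments are routed to the matching 'Set'-prefixed methods. Below is a minimal sketch of that behavior, assuming the itk Python package is installed; MedianImageFilter and the input file name are illustrative choices, not part of the record.

import itk

# The first non-named argument becomes the first input of the new filter.
image = itk.imread("input.nii.gz")  # hypothetical input file

# Keyword arguments are forwarded to 'Set' methods: Radius=2 calls SetRadius(2).
median = itk.MedianImageFilter.New(image, Radius=2)
median.Update()
output = median.GetOutput()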
itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass | def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)"
] | [
"0.8532647",
"0.85305256",
"0.8516313",
"0.849879",
"0.84499377",
"0.84234107",
"0.8399052",
"0.83886224",
"0.8355858",
"0.8344065",
"0.83316773",
"0.8331413",
"0.83286583",
"0.82926065",
"0.8276312",
"0.8266724",
"0.826375",
"0.8225655",
"0.8214009",
"0.8193666",
"0.8193466",
"0.81501955",
"0.78534245",
"0.7805978",
"0.7802641",
"0.7802427",
"0.7762043",
"0.7754395",
"0.77441114",
"0.7734065",
"0.7731937",
"0.7716808",
"0.76786023",
"0.7658318",
"0.76227117",
"0.7620756",
"0.7618745",
"0.7617573",
"0.7608669",
"0.76030463",
"0.75958604",
"0.7591009",
"0.75828576",
"0.75654703",
"0.755549",
"0.75538474",
"0.75513923",
"0.7531908",
"0.7513318",
"0.74726784",
"0.74716574",
"0.74581164",
"0.7453174",
"0.744025",
"0.7436473",
"0.7435019",
"0.74100006",
"0.7397446",
"0.7386418",
"0.73500013",
"0.7341015",
"0.73409873",
"0.7340898",
"0.7322006",
"0.7321723",
"0.7308609",
"0.7283741",
"0.7275204",
"0.72708666",
"0.72322536",
"0.72307575",
"0.72053915",
"0.7198638",
"0.7186513",
"0.7178378",
"0.7087075",
"0.7081885",
"0.7077293",
"0.7071108",
"0.70702916",
"0.7067063",
"0.7058104",
"0.7032287",
"0.7025628",
"0.7023475",
"0.7014788",
"0.6987418",
"0.6982805",
"0.6960359",
"0.69482505",
"0.69360447",
"0.69291335",
"0.69125056",
"0.68873006",
"0.6872989",
"0.6871058",
"0.68408865",
"0.680471",
"0.6775395",
"0.67752445"
] | 0.86180055 | 0 |
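The cast() records above all follow one pattern: each SWIG-wrapped ITK class carries a static cast() that downcasts a generic itkLightObject reference back to the concrete wrapped type. Below is a minimal sketch of that pattern, assuming the itk Python package is installed; NotImageFilter over a 2-D float image is used only because that instantiation appears among the wrapped classes listed above.

import itk

ImageType = itk.Image[itk.F, 2]
not_filter = itk.NotImageFilter[ImageType, ImageType].New()

# Any wrapped object can be passed where an itkLightObject is expected;
# the class-level cast() recovers the typed proxy from such a reference.
typed = itk.NotImageFilter[ImageType, ImageType].cast(not_filter)
print(typed.GetNameOfClass())  # -> 'NotImageFilter'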
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)"
] | [
"0.8231043",
"0.8183794",
"0.8179944",
"0.813997",
"0.8107713",
"0.81054693",
"0.8088916",
"0.8019836",
"0.80192333",
"0.8010472",
"0.80005395",
"0.7982409",
"0.7888748",
"0.7857484",
"0.7852095",
"0.78295845",
"0.7784547",
"0.7752002",
"0.7742223",
"0.76902217",
"0.76497644",
"0.760706",
"0.76034373",
"0.75609416",
"0.75573194",
"0.755303",
"0.7544214",
"0.7544203",
"0.75191766",
"0.7500881",
"0.7491752",
"0.74878216",
"0.74759066",
"0.7474353",
"0.743822",
"0.7424705",
"0.74246913",
"0.74135476",
"0.74078727",
"0.73842335",
"0.7383383",
"0.73749065",
"0.73659754",
"0.7363378",
"0.73468256",
"0.7320944",
"0.731616",
"0.731168",
"0.7306846",
"0.7299899",
"0.7291015",
"0.7285988",
"0.7274383",
"0.7257386",
"0.7257152",
"0.7248489",
"0.72455883",
"0.7243175",
"0.7242874",
"0.72306275",
"0.72301936",
"0.7212565",
"0.7211282",
"0.7198137",
"0.7192627",
"0.71894944",
"0.7187292",
"0.71741706",
"0.71648896",
"0.71616936",
"0.7149811",
"0.7143392",
"0.713862",
"0.71380746",
"0.7132426",
"0.7121055",
"0.71186996",
"0.71079904",
"0.71069705",
"0.71017057",
"0.70994353",
"0.7093223",
"0.708888",
"0.7083606",
"0.7083277",
"0.7078411",
"0.70714533",
"0.7067104",
"0.7063751",
"0.7060881",
"0.7060417",
"0.70589954",
"0.70557326",
"0.70542043",
"0.705332",
"0.70494163",
"0.7048891",
"0.70467436",
"0.7043439",
"0.70402455"
] | 0.76787597 | 20 |
New() -> itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass Create a new object of the class itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.76136214",
"0.75883645",
"0.75875884",
"0.7529568",
"0.74792403",
"0.7449526",
"0.74375355",
"0.7260869",
"0.7239844",
"0.7108787",
"0.70131814",
"0.69569594",
"0.6922677",
"0.6912971",
"0.68873686",
"0.687519",
"0.6859417",
"0.6853394",
"0.68529296",
"0.68343085",
"0.67922634",
"0.67798215",
"0.6776158",
"0.6775277",
"0.6766944",
"0.67660344",
"0.67555183",
"0.67503774",
"0.67455685",
"0.67155784",
"0.6707758",
"0.6695102",
"0.6694806",
"0.66915137",
"0.66845864",
"0.6682871",
"0.66796273",
"0.6675649",
"0.6640752",
"0.66348857",
"0.6624691",
"0.6605654",
"0.66006505",
"0.6578076",
"0.6571781",
"0.6570224",
"0.65692466",
"0.65687424",
"0.65676796",
"0.65546644",
"0.65460324",
"0.6526072",
"0.6523611",
"0.64802825",
"0.6449989",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061",
"0.6446061"
] | 0.77367264 | 0 |
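The New() docstring in the record above describes the generic construction convention of ITK's Python wrapping: positional arguments become the filter's inputs in order, and each keyword argument K=v is forwarded to the matching SetK(v) method. A minimal sketch of that convention, assuming the `itk` Python package is installed; MedianImageFilter and the file names are illustrative stand-ins, not part of the record:

```python
import itk

# Hypothetical input file; any 2-D scalar image works here.
image = itk.imread("input.png")

# Positional argument -> first input; Radius=2 is routed to SetRadius(2),
# exactly as the New() docstring above describes.
median = itk.MedianImageFilter.New(image, Radius=2)
median.Update()

itk.imwrite(median.GetOutput(), "output.png")
```

Because itkTemplate.New infers the template instantiation from the input's pixel type and dimension, the same call works for any wrapped filter without naming a concrete instantiation such as IUL2 explicitly.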
itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)"
] | [
"0.83347076",
"0.8259786",
"0.8254877",
"0.82273674",
"0.8213395",
"0.81952333",
"0.81827587",
"0.81737477",
"0.81565833",
"0.81444824",
"0.81440467",
"0.8123669",
"0.81183785",
"0.8090652",
"0.80894965",
"0.8077059",
"0.80663335",
"0.80111235",
"0.79480046",
"0.7916144",
"0.78519887",
"0.77994204",
"0.7795864",
"0.7681974",
"0.76066643",
"0.7603395",
"0.7585965",
"0.7562357",
"0.7551937",
"0.7528142",
"0.75018805",
"0.74961185",
"0.7490753",
"0.74622697",
"0.7453293",
"0.7441071",
"0.74337554",
"0.74317175",
"0.7422717",
"0.74123615",
"0.7408654",
"0.7401311",
"0.7397836",
"0.7376786",
"0.73696494",
"0.7353332",
"0.7347112",
"0.73401874",
"0.73202324",
"0.73059213",
"0.7272508",
"0.7244763",
"0.7237921",
"0.7220896",
"0.7216889",
"0.7211088",
"0.7206864",
"0.7201985",
"0.7180831",
"0.71400666",
"0.71292865",
"0.71003914",
"0.7071545",
"0.70539033",
"0.7044466",
"0.7036088",
"0.7028768",
"0.7003677",
"0.6988571",
"0.698773",
"0.69592834",
"0.694199",
"0.6934275",
"0.69114053",
"0.68910414",
"0.6887675",
"0.6879606",
"0.6865163",
"0.6829058",
"0.67853826",
"0.67505497",
"0.67447233",
"0.67383975",
"0.67245764",
"0.67005694",
"0.6694156",
"0.6684689",
"0.6681876",
"0.6661077",
"0.6659302",
"0.6656348",
"0.664412",
"0.66287476",
"0.66261935",
"0.66226554",
"0.6607615",
"0.66002315",
"0.6592641",
"0.6588278",
"0.6586991"
] | 0.8331188 | 1 |
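The *_cast signature in the record above is generated once per template instantiation and wraps the same C++ dynamic_cast: given a generic itkLightObject, it returns a pointer of the concrete wrapped type (or a null pointer when the object is not of that type). A minimal sketch, assuming an `itk` build in which the float 2-D instantiation below is wrapped; MedianImageFilter stands in for the classes listed in this dump:

```python
import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.MedianImageFilter[ImageType, ImageType]

filt = FilterType.New()

# Each wrapped class carries a static cast(obj) taking an itkLightObject;
# the module-level helpers in this dump (e.g. itk..._Superclass_cast)
# are alternative spellings of the same call.
recovered = FilterType.cast(filt)
assert recovered is not None
```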
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)"
] | [
"0.8344029",
"0.83224463",
"0.8284969",
"0.8224202",
"0.82228005",
"0.82063365",
"0.81694835",
"0.8165365",
"0.80582345",
"0.80344105",
"0.8034188",
"0.80162996",
"0.80015427",
"0.7995259",
"0.7987952",
"0.7976403",
"0.7948404",
"0.7799154",
"0.77925974",
"0.7752949",
"0.77080846",
"0.76979566",
"0.76705486",
"0.76638985",
"0.76001453",
"0.75661147",
"0.7506618",
"0.7499618",
"0.74933225",
"0.74922967",
"0.7481771",
"0.747491",
"0.7464645",
"0.7460878",
"0.74466527",
"0.7441096",
"0.7430311",
"0.7429931",
"0.7419063",
"0.7402705",
"0.7391296",
"0.7385964",
"0.73813546",
"0.73626536",
"0.7358555",
"0.734528",
"0.73374",
"0.7332965",
"0.7331655",
"0.7324391",
"0.7313323",
"0.73049873",
"0.72945774",
"0.7288617",
"0.7286364",
"0.7284393",
"0.72839594",
"0.72803825",
"0.72778267",
"0.7271438",
"0.7256759",
"0.72523427",
"0.7252208",
"0.72477853",
"0.7235256",
"0.72322273",
"0.7222204",
"0.7214747",
"0.7193915",
"0.718701",
"0.71826893",
"0.71814346",
"0.7170842",
"0.71699095",
"0.7165886",
"0.71426743",
"0.7135445",
"0.7130718",
"0.7130117",
"0.7128474",
"0.7119615",
"0.7118588",
"0.7116621",
"0.71163833",
"0.7106928",
"0.7105173",
"0.71043646",
"0.71018296",
"0.7098583",
"0.7087837",
"0.70876664",
"0.70670605",
"0.70591754",
"0.7057923",
"0.7055011",
"0.7051964",
"0.70424443",
"0.7032264",
"0.7029532",
"0.7026104"
] | 0.7512756 | 26 |
New() -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass. Create a new object of the class itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.77514553",
"0.765869",
"0.76537627",
"0.7553569",
"0.749837",
"0.7385497",
"0.7229729",
"0.72107583",
"0.71896565",
"0.71452445",
"0.71168566",
"0.7091152",
"0.70909774",
"0.7024852",
"0.70056397",
"0.6996257",
"0.69950944",
"0.69864553",
"0.69812405",
"0.69450486",
"0.6925556",
"0.69152147",
"0.6881612",
"0.6864559",
"0.68400246",
"0.68397975",
"0.68339914",
"0.6825365",
"0.67540365",
"0.67413706",
"0.67316574",
"0.6700439",
"0.66808397",
"0.6678612",
"0.66746545",
"0.6661264",
"0.66423386",
"0.66186225",
"0.6587629",
"0.6567584",
"0.65584666",
"0.6539007",
"0.6532006",
"0.653056",
"0.6526402",
"0.65203226",
"0.6517797",
"0.6506174",
"0.6497966",
"0.64964545",
"0.6483809",
"0.6460132",
"0.6448885",
"0.6448142",
"0.6437597",
"0.64341766",
"0.6426018",
"0.6425957",
"0.6423818",
"0.6420804",
"0.6414967",
"0.64054114",
"0.63838434",
"0.6380012",
"0.63763213",
"0.6359671",
"0.6353768",
"0.6344847",
"0.63409144",
"0.63183",
"0.6313218",
"0.6311289",
"0.63111675",
"0.63089734",
"0.6303271",
"0.628186",
"0.6274752",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583",
"0.62596583"
] | 0.7761621 | 0 |
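A minimal usage sketch of the New() factory semantics described in the query above, assuming the `itk` Python package is installed and that the unsigned-short 2-D instantiation of TernaryAddImageFilter is wrapped in the local build; the image size and fill values are illustrative assumptions, not taken from the dataset:

import itk

ImageType = itk.Image[itk.US, 2]

def make_image(fill):
    # Allocate a small constant image to feed the filter.
    img = ImageType.New()
    img.SetRegions([4, 4])
    img.Allocate()
    img.FillBuffer(fill)
    return img

a, b, c = make_image(1), make_image(2), make_image(3)

# Named parameters are forwarded to the method of the same name prefixed by
# 'Set': Input1=a ends up calling SetInput1(a) on the freshly created filter.
add_filter = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType,
                                       ImageType].New(Input1=a, Input2=b,
                                                      Input3=c)
add_filter.Update()
print(itk.array_from_image(add_filter.GetOutput())[0, 0])  # prints 6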
itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)"
] | [
"0.85246396",
"0.84796506",
"0.84340036",
"0.84302866",
"0.84288764",
"0.8423453",
"0.8416096",
"0.83491045",
"0.8327571",
"0.83225805",
"0.82702595",
"0.8268719",
"0.82654333",
"0.8251263",
"0.82445973",
"0.8234296",
"0.8218139",
"0.8206639",
"0.81782633",
"0.8108415",
"0.8069083",
"0.8034119",
"0.7786232",
"0.77656925",
"0.77128786",
"0.7711452",
"0.7677795",
"0.7654404",
"0.7647236",
"0.7645991",
"0.76412964",
"0.76381004",
"0.7633403",
"0.76329887",
"0.7619616",
"0.76060176",
"0.7605165",
"0.75952184",
"0.75926864",
"0.7584363",
"0.7574614",
"0.7574139",
"0.7542787",
"0.7533851",
"0.7508731",
"0.74792707",
"0.7455325",
"0.7446694",
"0.74402314",
"0.7436174",
"0.74279195",
"0.7417896",
"0.7412612",
"0.7400141",
"0.7351367",
"0.734125",
"0.73352396",
"0.7332331",
"0.72528565",
"0.72312814",
"0.7223558",
"0.7218232",
"0.7200889",
"0.7198832",
"0.7192009",
"0.7180258",
"0.7178877",
"0.7166869",
"0.71382076",
"0.7132521",
"0.7127095",
"0.7117685",
"0.70735544",
"0.6989159",
"0.6943567",
"0.6937737",
"0.6937387",
"0.69337934",
"0.69310033",
"0.69304615",
"0.69209015",
"0.69189185",
"0.6906876",
"0.68816835",
"0.6879068",
"0.686872",
"0.6863454",
"0.6844334",
"0.6831889",
"0.6807486",
"0.67981875",
"0.6751361",
"0.6724057",
"0.67173797",
"0.67108816",
"0.67088425",
"0.666981",
"0.6650783",
"0.6642026",
"0.66258526"
] | 0.85044765 | 1 |
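The generated cast() documented above performs a dynamic_cast from a generic itk.LightObject handle back to the concrete wrapped type. A short sketch of the same pattern through the Python class-level cast(), with illustrative type choices:

import itk

ImageType = itk.Image[itk.US, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType,
                                       ImageType]

f = FilterType.New()
# cast() accepts any itk.LightObject; on a type mismatch the underlying
# dynamic_cast yields a null pointer, which surfaces in Python as None.
same = FilterType.cast(f)
print(same.GetNameOfClass())  # 'TernaryAddImageFilter'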
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkBinaryContourImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS2IUS2IUS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)"
] | [
"0.8304301",
"0.82490915",
"0.81188726",
"0.80280966",
"0.7972961",
"0.78848064",
"0.7863343",
"0.7826107",
"0.7789981",
"0.7746478",
"0.7744639",
"0.7724042",
"0.771654",
"0.7712445",
"0.7704052",
"0.76961714",
"0.7681886",
"0.767606",
"0.7668913",
"0.76313215",
"0.7622179",
"0.7618332",
"0.75832456",
"0.7572826",
"0.7550871",
"0.75299394",
"0.7495356",
"0.7469125",
"0.7454271",
"0.74493587",
"0.74463356",
"0.74322456",
"0.7424613",
"0.7421344",
"0.7405788",
"0.7404294",
"0.7379592",
"0.73788345",
"0.7370223",
"0.73611426",
"0.7354825",
"0.7347413",
"0.73367494",
"0.73187727",
"0.73095655",
"0.73075205",
"0.7294445",
"0.72827274",
"0.7270859",
"0.72434616",
"0.72363836",
"0.72267336",
"0.7225206",
"0.7218467",
"0.7212916",
"0.72112966",
"0.72111285",
"0.7204532",
"0.7203989",
"0.7185076",
"0.71775407",
"0.71770203",
"0.7175211",
"0.71601176",
"0.71597236",
"0.7156316",
"0.71504915",
"0.7146249",
"0.71436787",
"0.7142329",
"0.7140638",
"0.7139662",
"0.7137499",
"0.7135203",
"0.7126055",
"0.7102741",
"0.71024036",
"0.70996225",
"0.7091689",
"0.7088896",
"0.7085804",
"0.708423",
"0.70790935",
"0.7072066",
"0.70701927",
"0.7069088",
"0.70519865",
"0.7049831",
"0.7046435",
"0.70388323",
"0.7019938",
"0.70159847",
"0.70025724",
"0.70021987",
"0.7000889",
"0.69887584",
"0.69862217",
"0.69762385",
"0.6976138",
"0.6971799"
] | 0.77324975 | 11 |
New() -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass Create a new object of the class itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the input of the new objects - the first non-named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.7596377",
"0.75910646",
"0.7560997",
"0.75100327",
"0.74321425",
"0.73726755",
"0.73566854",
"0.734661",
"0.7272658",
"0.72386825",
"0.7217317",
"0.7106021",
"0.7098527",
"0.70696133",
"0.7002581",
"0.6983039",
"0.6974576",
"0.69361895",
"0.6912299",
"0.68864566",
"0.6862588",
"0.6853599",
"0.68524754",
"0.6848617",
"0.6841335",
"0.6814946",
"0.6791793",
"0.6789456",
"0.67701083",
"0.6760358",
"0.67310727",
"0.6716998",
"0.670478",
"0.66943973",
"0.664881",
"0.6643523",
"0.66214424",
"0.66144234",
"0.6614326",
"0.659081",
"0.6562268",
"0.6542881",
"0.6528058",
"0.6526112",
"0.65230846",
"0.65227056",
"0.6520851",
"0.6501615",
"0.64999753",
"0.649874",
"0.64983445",
"0.64970106",
"0.6496829",
"0.6483747",
"0.64756453",
"0.6472251",
"0.6465084",
"0.6461237",
"0.6457734",
"0.644084",
"0.6408411",
"0.64083564",
"0.640657",
"0.6394307",
"0.639152",
"0.63722676",
"0.6369723",
"0.63688517",
"0.63612926",
"0.63367",
"0.6334947",
"0.6293576",
"0.62925017",
"0.62921923",
"0.6279671",
"0.62619746",
"0.6259935",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981",
"0.6245981"
] | 0.7922314 | 0 |
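The New() docstring in the record above describes ITK's generic construction convention: positional arguments are wired to the filter's inputs in order, and keyword arguments are forwarded to the matching Set-prefixed methods. A minimal sketch of that convention, following the standard example from ITK's own documentation ('itkSomeFilter', 'reader', and the Threshold parameter are hypothetical stand-ins, not members of this particular filter):

    import itk  # assumes the ITK Python wrapping is installed

    # Positional args become inputs, keyword args call Set<Name>:
    obj = itkSomeFilter.New(reader, Threshold=10)

    # ...which is (most of the time) equivalent to the explicit form:
    obj = itkSomeFilter.New()
    obj.SetInput(0, reader.GetOutput())
    obj.SetThreshold(10)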
itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass | def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS2IUS2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)"
] | [
"0.8769365",
"0.8522652",
"0.839778",
"0.83974636",
"0.8386833",
"0.8381579",
"0.8350641",
"0.8332937",
"0.8298384",
"0.8288761",
"0.824455",
"0.82303673",
"0.8217982",
"0.82114774",
"0.81883687",
"0.8180019",
"0.8107185",
"0.8103616",
"0.8074904",
"0.80426836",
"0.8036381",
"0.7998272",
"0.7987268",
"0.79579437",
"0.780325",
"0.77993983",
"0.77519345",
"0.769762",
"0.7661356",
"0.7617688",
"0.76151466",
"0.75940067",
"0.7589619",
"0.7576392",
"0.75207037",
"0.7514116",
"0.75138426",
"0.75114655",
"0.7480147",
"0.7477699",
"0.7477096",
"0.7476146",
"0.7447043",
"0.74280196",
"0.74266064",
"0.74251485",
"0.74221826",
"0.74061364",
"0.7388003",
"0.7384987",
"0.7352962",
"0.73488206",
"0.7345603",
"0.73271626",
"0.73198235",
"0.73147404",
"0.731437",
"0.7312223",
"0.73112285",
"0.7307",
"0.7287668",
"0.7280064",
"0.7260006",
"0.7259262",
"0.7256215",
"0.7232388",
"0.72312033",
"0.720083",
"0.71996874",
"0.71661174",
"0.7161119",
"0.7158443",
"0.71490884",
"0.71246004",
"0.70724446",
"0.7066089",
"0.7058892",
"0.70536673",
"0.70535815",
"0.7051724",
"0.70429504",
"0.70331264",
"0.70029616",
"0.6997591",
"0.6978759",
"0.6958639",
"0.69438154",
"0.6936197",
"0.692253",
"0.691779",
"0.6892745",
"0.68925697",
"0.68898267",
"0.68807685",
"0.6876029",
"0.68720114",
"0.6865854",
"0.68611085",
"0.6860876",
"0.68572396"
] | 0.83809143 | 6 |
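The _cast helpers in the record above are the SWIG-generated downcast functions: given a generic itkLightObject handle, they return a pointer typed as the concrete wrapped class. A hedged sketch of how such a helper is typically used ('light_obj' is a hypothetical itkLightObject reference obtained elsewhere, e.g. from a generic pipeline query):

    # Downcast a generic handle to the concrete wrapped type, either via
    # the module-level function or the equivalent class-level static method.
    typed = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(light_obj)
    typed = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.cast(light_obj)
    # Check the result before use; on a runtime type mismatch the underlying
    # dynamic_cast is expected (not verified here) to yield a null handle.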
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUS3IUS3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUS3IUS3SE3_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)"
] | [
"0.83137727",
"0.8301095",
"0.82114863",
"0.8178836",
"0.8119993",
"0.79578906",
"0.79196596",
"0.78718543",
"0.78582305",
"0.7827678",
"0.78161174",
"0.7789821",
"0.7776616",
"0.7749188",
"0.7730019",
"0.7708901",
"0.7708324",
"0.7677274",
"0.7676748",
"0.765284",
"0.76485854",
"0.76299924",
"0.76129377",
"0.759367",
"0.75406814",
"0.75357664",
"0.7514312",
"0.75092643",
"0.74905443",
"0.7486773",
"0.7485491",
"0.74750197",
"0.7467983",
"0.745994",
"0.7456651",
"0.7421191",
"0.7420005",
"0.7405873",
"0.7386333",
"0.73757905",
"0.7344383",
"0.7311919",
"0.73083425",
"0.72943544",
"0.7279216",
"0.72591335",
"0.7247129",
"0.7242617",
"0.7231983",
"0.7228835",
"0.7228313",
"0.7224353",
"0.7222945",
"0.72181195",
"0.7215697",
"0.72153246",
"0.7207509",
"0.7200813",
"0.7200199",
"0.7189163",
"0.71875393",
"0.71808434",
"0.71801835",
"0.7178197",
"0.71609974",
"0.7160869",
"0.71587956",
"0.7151986",
"0.7148653",
"0.71479446",
"0.713962",
"0.7136084",
"0.7120715",
"0.7119688",
"0.71167463",
"0.7100738",
"0.710073",
"0.70979106",
"0.7095384",
"0.70909536",
"0.7089251",
"0.70881283",
"0.7086319",
"0.7081005",
"0.7077099",
"0.706863",
"0.70515263",
"0.7051447",
"0.7050438",
"0.7047568",
"0.7047114",
"0.7045784",
"0.7044856",
"0.7044767",
"0.70310587",
"0.70305264",
"0.7016695",
"0.701451",
"0.7008694",
"0.7004938"
] | 0.7613641 | 22 |
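Taken together, the two conventions above compose: an object built with New() can be passed around as a plain itkLightObject and later recovered with the matching cast helper. A brief round-trip sketch for the IUS3 variant (purely illustrative; no inputs are connected):

    obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.New()
    light = obj    # usable wherever an itkLightObject is expected
    same = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.cast(light)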
New() -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass Create a new object of the class itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the input of the new objects - the first non-named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
    obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()
    import itkTemplate
    itkTemplate.New(obj, *args, **kargs)
    return obj
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMD3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkVTKPolyDataReaderMF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS3IUS3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init_subclass__(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.76287913",
"0.7548939",
"0.7542766",
"0.74978733",
"0.74911535",
"0.73237395",
"0.7311357",
"0.7223127",
"0.7221928",
"0.7203291",
"0.711763",
"0.70980036",
"0.7094449",
"0.70661205",
"0.7045585",
"0.69802654",
"0.69774556",
"0.6953183",
"0.6930293",
"0.6927179",
"0.69252926",
"0.68612796",
"0.6853266",
"0.68411696",
"0.68341136",
"0.6805854",
"0.676833",
"0.676823",
"0.67421156",
"0.6724591",
"0.67157364",
"0.66891545",
"0.66867435",
"0.6681363",
"0.66381276",
"0.6633663",
"0.66273004",
"0.6620194",
"0.6616909",
"0.65887594",
"0.6572548",
"0.65508956",
"0.6550005",
"0.6517347",
"0.6485657",
"0.64830244",
"0.6475291",
"0.6462602",
"0.644652",
"0.64456666",
"0.6441375",
"0.6440285",
"0.641208",
"0.6408471",
"0.63862234",
"0.63704604",
"0.63694197",
"0.6368701",
"0.63676447",
"0.6361184",
"0.6361014",
"0.63428044",
"0.6334782",
"0.63175815",
"0.63135326",
"0.63101983",
"0.6308569",
"0.6286867",
"0.6283185",
"0.6282195",
"0.627305",
"0.6270489",
"0.62643135",
"0.62612617",
"0.6253935",
"0.6245245",
"0.6244259",
"0.6241932",
"0.6236328",
"0.62244695",
"0.62239224",
"0.62164813",
"0.6216283",
"0.6215385",
"0.6215274",
"0.62136817",
"0.6212044",
"0.61956024",
"0.6194999",
"0.61920655",
"0.61892503",
"0.61670446",
"0.6158493",
"0.6150511",
"0.6147395",
"0.61406034",
"0.61406034",
"0.61406034",
"0.61406034",
"0.61406034"
] | 0.7858551 | 0 |
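Note on the record above: the `New()` docstring describes the generic argument-dispatch convention implemented by `itkTemplate.New`, where positional arguments are wired to the filter inputs in order and each keyword argument `Foo=x` is forwarded to `SetFoo(x)`. Below is a minimal sketch of that convention, assuming an installed `itk` package; `itk.TernaryAddImageFilter` is the public template behind the wrapped classes above, and `make_image` is a hypothetical helper added purely for illustration:

```python
import itk

ImageType = itk.Image[itk.US, 3]  # 3-D unsigned short, matching the IUS3 wrapping


def make_image(value):
    # Hypothetical helper: build a tiny constant-valued test image.
    img = ImageType.New()
    img.SetRegions([4, 4, 4])
    img.Allocate()
    img.FillBuffer(value)
    return img


a, b, c = make_image(1), make_image(2), make_image(3)

# Non-named arguments become Input1, Input2, Input3 in order, exactly as the
# docstring above describes; passing Input2=b as a keyword would call SetInput2(b).
adder = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType].New(a, b, c)
adder.Update()
print(adder.GetOutput().GetPixel([0, 0, 0]))  # expected: 6
```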
itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass | def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):
    return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkCosImageFilterID3ID3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMD2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS3IUS3IUS3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_cast(obj)",
"def itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHDIF2_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID2ID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterID3ID3_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def itkCosImageFilterID2ID2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID2ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF3IF3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)",
"def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)",
"def itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF22ID2_Superclass_cast(*args)",
"def cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args):\n return _itkBoundedReciprocalImageFilterPython.itkBoundedReciprocalImageFilterIF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)",
"def cast(*args):\n return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)"
] | [
"0.8716246",
"0.86355853",
"0.86032987",
"0.85149944",
"0.8514654",
"0.8504637",
"0.85039264",
"0.8490741",
"0.8417344",
"0.8411787",
"0.8396364",
"0.8383459",
"0.838299",
"0.8310304",
"0.8255951",
"0.82330203",
"0.82198226",
"0.82180065",
"0.81992364",
"0.8187167",
"0.81810266",
"0.8094176",
"0.7989682",
"0.7905116",
"0.78650725",
"0.77804565",
"0.77492243",
"0.7747317",
"0.7727909",
"0.77024734",
"0.7676982",
"0.7675931",
"0.7663144",
"0.76427585",
"0.76419175",
"0.7640859",
"0.7630106",
"0.76192904",
"0.7606939",
"0.7591958",
"0.7584353",
"0.7580471",
"0.75638765",
"0.75554585",
"0.7554216",
"0.75489354",
"0.7548919",
"0.7541405",
"0.7539749",
"0.7533455",
"0.7506361",
"0.7484503",
"0.74841535",
"0.7480113",
"0.7473402",
"0.74167895",
"0.735574",
"0.7350082",
"0.73254156",
"0.7320643",
"0.7320593",
"0.73131263",
"0.7307321",
"0.73050874",
"0.72866726",
"0.7285737",
"0.7258335",
"0.72555494",
"0.72460353",
"0.7241692",
"0.7241365",
"0.72174865",
"0.7211288",
"0.7210859",
"0.72066045",
"0.72006863",
"0.71830404",
"0.7117859",
"0.7052691",
"0.7051937",
"0.70417666",
"0.70358187",
"0.7035295",
"0.70323056",
"0.7030922",
"0.70294595",
"0.7025296",
"0.70195335",
"0.70194334",
"0.70189357",
"0.6990645",
"0.69811094",
"0.69769037",
"0.6918229",
"0.6894716",
"0.6871534",
"0.6857674",
"0.68126756",
"0.6801817",
"0.6779192"
] | 0.8538841 | 3 |
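The `cast` records above all wrap the same generated helper: a static down-cast that recovers a concrete wrapped type from a generic `itkLightObject` handle, which is how type information is restored when only a base-class reference survives a pipeline. A hedged sketch of typical usage, assuming the `itk` package and a build in which double-precision 2-D images are wrapped (as the ID2 classes above indicate):

```python
import itk

ImageType = itk.Image[itk.D, 2]  # 2-D double, matching the ID2 wrapping
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

concrete = FilterType.New()

# Every wrapped object is-a itkLightObject, so it can be handed to the cast
# helpers as-is; the static cast() (whose module-level twin is the *_cast
# free function shown above) recovers the concrete type from that handle.
recovered = FilterType.cast(concrete)
print(type(recovered).__name__)  # e.g. itkTernaryAddImageFilterID2ID2ID2ID2
```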
cast(itkLightObject obj) -> itkTernaryAddImageFilterID2ID2ID2ID2 | def cast(*args):
    return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF22IVF22 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF22IVF22_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS2ISS2ISS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS2ISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF32IVF32 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF32IVF32_cast(obj)",
"def itkBinaryContourImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF42IVF42 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF42IVF42_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkTransformMeshFilterMF2MF2TD22 *\":\n return _itkTransformMeshFilterPython.itkTransformMeshFilterMF2MF2TD22_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)"
] | [
"0.7715046",
"0.7705152",
"0.76006335",
"0.75384825",
"0.74669284",
"0.7435751",
"0.74244475",
"0.73907906",
"0.73766285",
"0.73675126",
"0.7347136",
"0.73369384",
"0.733344",
"0.7333071",
"0.73104805",
"0.730174",
"0.729558",
"0.7290023",
"0.7279606",
"0.7250818",
"0.7246067",
"0.7239658",
"0.7232525",
"0.7219226",
"0.7214132",
"0.72092205",
"0.7206482",
"0.7204064",
"0.7196005",
"0.71868",
"0.71707034",
"0.71463436",
"0.71460146",
"0.7137301",
"0.71239984",
"0.7120324",
"0.710397",
"0.710048",
"0.7095327",
"0.7077529",
"0.7077013",
"0.70627713",
"0.70618886",
"0.70534587",
"0.7046183",
"0.70451343",
"0.7044045",
"0.7038804",
"0.7037973",
"0.7032828",
"0.7029209",
"0.7025092",
"0.7018926",
"0.7013664",
"0.7012003",
"0.70107174",
"0.7009742",
"0.7004514",
"0.69993156",
"0.6989685",
"0.6989066",
"0.69861203",
"0.6980363",
"0.6979226",
"0.6978969",
"0.6977955",
"0.697686",
"0.69739187",
"0.69640243",
"0.69625485",
"0.69585246",
"0.69584495",
"0.69500476",
"0.6944018",
"0.69368184",
"0.6934603",
"0.69317824",
"0.6930937",
"0.69224703",
"0.69219184",
"0.6921745",
"0.6910575",
"0.69103056",
"0.6905736",
"0.6905153",
"0.69027877",
"0.68996876",
"0.6897013",
"0.68950206",
"0.68916494",
"0.68913674",
"0.6888427",
"0.68854207",
"0.68833876",
"0.68819505",
"0.6879572",
"0.68762743",
"0.68761414",
"0.6875917",
"0.68746066"
] | 0.7090828 | 39 |
New() -> itkTernaryAddImageFilterID2ID2ID2ID2 Create a new object of the class itkTernaryAddImageFilterID2ID2ID2ID2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
    obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()
    import itkTemplate
    itkTemplate.New(obj, *args, **kargs)
    return obj
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS2ISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID2ID2D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF2IF2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS2IUS2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterISS2ISS2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUC2IUC2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.79866374",
"0.7950948",
"0.7902202",
"0.76756924",
"0.7476825",
"0.74265414",
"0.742054",
"0.7409127",
"0.7372828",
"0.73234135",
"0.7315541",
"0.7296767",
"0.7270058",
"0.7263262",
"0.7241665",
"0.7233361",
"0.72253376",
"0.72154576",
"0.715046",
"0.7147575",
"0.71471334",
"0.71427715",
"0.71396875",
"0.7122744",
"0.7106084",
"0.70948124",
"0.70886177",
"0.7087755",
"0.70497006",
"0.70360106",
"0.7034279",
"0.70221037",
"0.70195806",
"0.7015186",
"0.700797",
"0.7002811",
"0.7000634",
"0.69809526",
"0.69782954",
"0.6969094",
"0.69332486",
"0.69201213",
"0.6916795",
"0.691103",
"0.69058627",
"0.6886734",
"0.68822026",
"0.68698335",
"0.6866538",
"0.686236",
"0.68585163",
"0.6848134",
"0.6848083",
"0.6846839",
"0.68401754",
"0.6836066",
"0.68229043",
"0.6820789",
"0.68180424",
"0.68169373",
"0.6808928",
"0.6808654",
"0.68050575",
"0.67977273",
"0.6794003",
"0.6793492",
"0.6782974",
"0.6778659",
"0.6771253",
"0.675786",
"0.67550135",
"0.6742718",
"0.67415965",
"0.67381096",
"0.6736584",
"0.67305213",
"0.67139965",
"0.6712618",
"0.67126113",
"0.6705383",
"0.67036283",
"0.6698088",
"0.6689859",
"0.66855204",
"0.6685025",
"0.66844094",
"0.66830295",
"0.66705674",
"0.66631573",
"0.6655726",
"0.66552275",
"0.6653606",
"0.6641138",
"0.6636095",
"0.66331047",
"0.66284645",
"0.6627201",
"0.6615351",
"0.6606087",
"0.6585147"
] | 0.8147333 | 0 |
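
The record above describes the generated New() convention only in prose, so a brief illustrative sketch follows. It is not part of the dataset: the file names, the pixel type, and the assumption that TernaryAddImageFilter is wrapped for 2-D double-pixel images (the ID2 suffix) in the installed ITK build are assumptions made for the example, not facts taken from the records.

```python
import itk

# Hedged sketch of the New() convention described in the record above.
# Assumes an ITK Python build where TernaryAddImageFilter is wrapped for
# itk.Image[itk.D, 2] (the "ID2" instantiations); file names are made up.
img1 = itk.imread("a.mha", itk.D)
img2 = itk.imread("b.mha", itk.D)
img3 = itk.imread("c.mha", itk.D)

# Non-named arguments are assigned to the inputs in order, so this is
# equivalent to calling SetInput1/SetInput2/SetInput3 on a fresh instance:
ternary_add = itk.TernaryAddImageFilter.New(img1, img2, img3)

# Named arguments are routed through the matching Set* methods instead:
# itk.TernaryAddImageFilter.New(Input1=img1, Input2=img2, Input3=img3)

ternary_add.Update()
result = ternary_add.GetOutput()
```

Letting the itk.TernaryAddImageFilter template pick the ID2 instantiation from the argument types avoids spelling out the mangled class name by hand.
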
itkTernaryAddImageFilterID2ID2ID2ID2_cast(itkLightObject obj) > itkTernaryAddImageFilterID2ID2ID2ID2 | def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS2ISS2ISS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS2ISS2ISS2_cast(obj)",
"def itkBinaryContourImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterISS2ISS2ISS2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS2ISS2ISS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS2ISS2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF22IVF22 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF22IVF22_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkSubtractImageFilterIF2IF2IF2_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_cast(*args)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)"
] | [
"0.76308167",
"0.758188",
"0.7517058",
"0.7454919",
"0.74101835",
"0.73981917",
"0.7383619",
"0.73698163",
"0.7358109",
"0.7336409",
"0.7328914",
"0.7323717",
"0.7288171",
"0.72586155",
"0.725105",
"0.72349465",
"0.7195933",
"0.7163779",
"0.7159076",
"0.7150028",
"0.7110235",
"0.71037704",
"0.7098279",
"0.70937634",
"0.70878726",
"0.70763785",
"0.7071058",
"0.7063865",
"0.7051465",
"0.7048067",
"0.7044855",
"0.704411",
"0.70333326",
"0.7033006",
"0.70224947",
"0.7016516",
"0.70031065",
"0.69963",
"0.69842577",
"0.69647086",
"0.69558823",
"0.6952095",
"0.694647",
"0.6945052",
"0.6944542",
"0.69433457",
"0.69387585",
"0.6922828",
"0.6920368",
"0.69171256",
"0.6914657",
"0.69134116",
"0.6912645",
"0.69099784",
"0.6898102",
"0.68935853",
"0.6890169",
"0.6882698",
"0.688172",
"0.6879938",
"0.6870878",
"0.6868541",
"0.6863564",
"0.68626666",
"0.6860922",
"0.6860221",
"0.6860014",
"0.6859592",
"0.6855664",
"0.6824769",
"0.6820064",
"0.6811712",
"0.68085706",
"0.6807414",
"0.6806517",
"0.68063784",
"0.68028396",
"0.68027973",
"0.6802466",
"0.6773515",
"0.67714643",
"0.6770181",
"0.6769686",
"0.6768559",
"0.67652464",
"0.67648613",
"0.6764369",
"0.6759935",
"0.6754787",
"0.6742307",
"0.6740574",
"0.6739595",
"0.673923",
"0.6735731",
"0.67353016",
"0.6731861",
"0.6714607",
"0.67092407",
"0.67049724",
"0.6704581"
] | 0.7765551 | 0 |
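
The cast records above and below give only the mangled signature, so here is a short hedged sketch of what the down-cast is for. The details (that the class-level cast is reachable as .cast on the instantiated template, and that a failed dynamic_cast surfaces as a null/None result) reflect typical SWIG-wrapped ITK behavior and are assumptions for the example, not guarantees from the dataset.

```python
import itk

# Hedged sketch: recover the concrete wrapped type from a generic
# itkLightObject-typed handle, assuming the ID2 instantiation exists.
ImageType = itk.Image[itk.D, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

concrete = FilterType.New()

# Suppose the filter reaches us through a base-class (itkLightObject /
# itkProcessObject) reference; cast() performs the checked down-cast back:
recovered = FilterType.cast(concrete)
assert recovered is not None  # the runtime type matches, so the cast succeeds
```
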
cast(itkLightObject obj) > itkTernaryAddImageFilterID3ID3ID3ID3 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUS3IUS3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUS3IUS3SE3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF32IVF32 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF32IVF32_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterISS3ISS3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS3ISS3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterISS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVectorExpandImageFilterIVF33IVF33 *\":\n return _itkVectorExpandImageFilterPython.itkVectorExpandImageFilterIVF33IVF33_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)"
] | [
"0.7741984",
"0.7713559",
"0.77034396",
"0.76531625",
"0.76131505",
"0.7598604",
"0.75758034",
"0.7566732",
"0.75606537",
"0.75531316",
"0.75246245",
"0.749187",
"0.7488528",
"0.74811006",
"0.74808985",
"0.74759084",
"0.74668235",
"0.74441904",
"0.7443954",
"0.7413306",
"0.74001694",
"0.73931956",
"0.7389591",
"0.7373922",
"0.7368931",
"0.73617274",
"0.7357486",
"0.7350102",
"0.7343565",
"0.7340307",
"0.7328906",
"0.732763",
"0.7315087",
"0.7292586",
"0.72898376",
"0.72763216",
"0.72727907",
"0.72708356",
"0.725343",
"0.7245992",
"0.72406733",
"0.72372895",
"0.7233177",
"0.72126",
"0.72111475",
"0.72080207",
"0.7196281",
"0.7177274",
"0.7165927",
"0.7137297",
"0.7137167",
"0.7120892",
"0.71144176",
"0.7111313",
"0.7106497",
"0.7093306",
"0.7090764",
"0.70878273",
"0.7087123",
"0.7083045",
"0.7082154",
"0.7064439",
"0.70609885",
"0.7058356",
"0.70496",
"0.7045726",
"0.7044562",
"0.70387936",
"0.703778",
"0.7037588",
"0.7024498",
"0.70226574",
"0.70126486",
"0.7011286",
"0.70078033",
"0.70029294",
"0.7002347",
"0.6997699",
"0.6995794",
"0.6994806",
"0.69939727",
"0.6991978",
"0.69880956",
"0.69878864",
"0.6985433",
"0.6983016",
"0.69793636",
"0.69785315",
"0.6977937",
"0.6977736",
"0.6975831",
"0.6974378",
"0.69698197",
"0.6968857",
"0.6968754",
"0.6962507",
"0.6961416",
"0.69601357",
"0.6959342",
"0.6958294"
] | 0.7174502 | 48 |
New() > itkTernaryAddImageFilterID3ID3ID3ID3 Create a new object of the class itkTernaryAddImageFilterID3ID3ID3ID3 and set its inputs and parameters from any named or non-named arguments passed to the method. New() assigns the non-named parameters to the inputs of the new object in order: the first non-named parameter goes to the first input, the second to the second, and so on. Each named parameter is applied by calling the method of the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID3ID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS3ISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF3IF3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS3IUS3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterISS3ISS3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUC3IUC3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.8122643",
"0.80405945",
"0.79705286",
"0.7826779",
"0.7583411",
"0.75830215",
"0.7572595",
"0.756218",
"0.74430597",
"0.74388206",
"0.7398677",
"0.73933834",
"0.7389276",
"0.7356299",
"0.73425376",
"0.732044",
"0.73061585",
"0.7295671",
"0.7292391",
"0.7291407",
"0.7245295",
"0.7238673",
"0.72325724",
"0.7228048",
"0.72212756",
"0.7216588",
"0.72103286",
"0.7194369",
"0.7192333",
"0.719084",
"0.7169697",
"0.7161537",
"0.71587485",
"0.715798",
"0.7156938",
"0.7135225",
"0.7134959",
"0.7115595",
"0.7106025",
"0.71040004",
"0.7102304",
"0.7062251",
"0.7057906",
"0.7041161",
"0.70404285",
"0.70385736",
"0.70325977",
"0.70279145",
"0.702689",
"0.700549",
"0.70039433",
"0.7003882",
"0.69990665",
"0.6981284",
"0.69759804",
"0.6968598",
"0.69640493",
"0.6955246",
"0.69519854",
"0.6941085",
"0.6935707",
"0.69276685",
"0.6927236",
"0.6922532",
"0.6921327",
"0.6921195",
"0.6918824",
"0.6911508",
"0.69084305",
"0.6886724",
"0.6880893",
"0.6851334",
"0.68479407",
"0.6844937",
"0.6839733",
"0.68392444",
"0.683831",
"0.68295383",
"0.6825444",
"0.68237287",
"0.68222",
"0.682003",
"0.6813554",
"0.68117356",
"0.67895186",
"0.67855406",
"0.6774384",
"0.6772221",
"0.6764194",
"0.67626613",
"0.67526233",
"0.67496186",
"0.6743926",
"0.6741848",
"0.67385024",
"0.67210406",
"0.6716653",
"0.67118007",
"0.6707055",
"0.67061895"
] | 0.81664336 | 0 |
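A minimal usage sketch for the New() factory documented in the query above, assuming an ITK Python build where double-pixel 3-D images are wrapped (itk.D; substitute itk.F otherwise) and itk.TernaryAddImageFilter is available; image sizes and fill values are illustrative:

    import itk

    ImageType = itk.Image[itk.D, 3]  # the "ID3" in the wrapped class name

    def make_image(fill):
        # Build a small 8x8x8 image filled with a constant value.
        img = ImageType.New()
        img.SetRegions([8, 8, 8])
        img.Allocate()
        img.FillBuffer(fill)
        return img

    a, b, c = make_image(1.0), make_image(2.0), make_image(3.0)

    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]
    # Non-named arguments are assigned to the inputs in order; named
    # arguments call the matching 'Set'-prefixed method (SetInput3 here).
    add = FilterType.New(a, b, Input3=c)
    add.Update()
    print(add.GetOutput().GetPixel([0, 0, 0]))  # 6.0 == 1.0 + 2.0 + 3.0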
itkTernaryAddImageFilterID3ID3ID3ID3_cast(itkLightObject obj) -> itkTernaryAddImageFilterID3ID3ID3ID3 | def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_cast(*args)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_cast(*args)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkTransformMeshFilterMF3MF3TD33 *\":\n return _itkTransformMeshFilterPython.itkTransformMeshFilterMF3MF3TD33_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)"
] | [
"0.78832257",
"0.785561",
"0.77852875",
"0.7771489",
"0.7718299",
"0.7687708",
"0.76786035",
"0.7662282",
"0.7625325",
"0.7598742",
"0.7597835",
"0.7562758",
"0.7558518",
"0.75351113",
"0.75327724",
"0.7522676",
"0.75222576",
"0.7520495",
"0.75182086",
"0.7504889",
"0.7499525",
"0.748609",
"0.74735343",
"0.74709594",
"0.7463708",
"0.7444209",
"0.74399316",
"0.7431289",
"0.7430193",
"0.7428997",
"0.74288005",
"0.73883873",
"0.73790455",
"0.7288585",
"0.72843224",
"0.72799325",
"0.72763085",
"0.7256809",
"0.72504926",
"0.72379214",
"0.7233447",
"0.722093",
"0.72146505",
"0.72082233",
"0.7204853",
"0.7197672",
"0.7197474",
"0.7194331",
"0.71905667",
"0.71898335",
"0.71762836",
"0.71563715",
"0.7156196",
"0.7152928",
"0.7151354",
"0.71468294",
"0.7113009",
"0.71070427",
"0.7093406",
"0.7080203",
"0.7077373",
"0.70758384",
"0.70694464",
"0.7066673",
"0.70662546",
"0.705661",
"0.7049776",
"0.70417774",
"0.7035586",
"0.7035538",
"0.702926",
"0.70252526",
"0.7024534",
"0.7017805",
"0.7015965",
"0.7009342",
"0.7007598",
"0.70049685",
"0.69993937",
"0.6981672",
"0.69802284",
"0.6979127",
"0.69741684",
"0.6968076",
"0.6964483",
"0.6961786",
"0.6961713",
"0.6957803",
"0.6945631",
"0.69401526",
"0.6932658",
"0.6922638",
"0.6909305",
"0.689651",
"0.6896498",
"0.6888337",
"0.6881735",
"0.6880073",
"0.68762094",
"0.6871774"
] | 0.80553585 | 0 |
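A short sketch of what the generated _cast helpers above are for: recovering the concrete wrapped type from a plain base-class handle (for example an itkLightObject produced by a generic factory). Same wrapping assumptions as the sketch above; cast() returns None when the dynamic type does not match:

    import itk

    ImageType = itk.Image[itk.D, 3]
    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

    f = FilterType.New()
    generic = itk.ProcessObject.cast(f)   # up-cast to a generic handle
    concrete = FilterType.cast(generic)   # down-cast via the generated helper
    assert concrete is not None
    print(concrete.GetNameOfClass())      # 'TernaryAddImageFilter'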
cast(itkLightObject obj) -> itkTernaryAddImageFilterIF2IF2IF2IF2 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkLabelStatisticsImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkBinaryContourImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkIsoDataThresholdCalculatorHFF *\":\n return _itkIsoDataThresholdCalculatorPython.itkIsoDataThresholdCalculatorHFF_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)"
] | [
"0.8116733",
"0.8000364",
"0.7750954",
"0.77352816",
"0.76985484",
"0.7662705",
"0.76402336",
"0.7612111",
"0.75822896",
"0.7576933",
"0.74957603",
"0.74747777",
"0.7450279",
"0.74031514",
"0.7363972",
"0.7326749",
"0.73242706",
"0.73232335",
"0.73223644",
"0.73027104",
"0.73011816",
"0.72858775",
"0.72809196",
"0.7280699",
"0.727146",
"0.7260606",
"0.7249125",
"0.7233078",
"0.72290546",
"0.7222931",
"0.72223073",
"0.7212111",
"0.72115594",
"0.72030604",
"0.7179329",
"0.7176801",
"0.7167274",
"0.71650714",
"0.71550876",
"0.7148239",
"0.7144028",
"0.7142399",
"0.71374553",
"0.7134691",
"0.7126228",
"0.7118288",
"0.7116772",
"0.71165353",
"0.7115221",
"0.7106644",
"0.71063334",
"0.7101298",
"0.7091122",
"0.70756155",
"0.70726615",
"0.7061981",
"0.70554566",
"0.7048818",
"0.70341897",
"0.7031976",
"0.7027509",
"0.70205915",
"0.7019109",
"0.7018743",
"0.70139295",
"0.7013149",
"0.7008694",
"0.700357",
"0.6999409",
"0.6984428",
"0.6975545",
"0.6972586",
"0.6971805",
"0.6969953",
"0.6968238",
"0.6963814",
"0.6960825",
"0.6950264",
"0.69451696",
"0.6943234",
"0.69431037",
"0.69323903",
"0.69239295",
"0.69114894",
"0.69067085",
"0.690106",
"0.6895569",
"0.6891624",
"0.6889799",
"0.6887634",
"0.68865436",
"0.68823916",
"0.6880086",
"0.6874922",
"0.68747365",
"0.68743086",
"0.68743",
"0.6869821",
"0.685526",
"0.68530166"
] | 0.7416886 | 13 |
New() -> itkTernaryAddImageFilterIF2IF2IF2IF2 Create a new object of the class itkTernaryAddImageFilterIF2IF2IF2IF2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF2IF2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS2ISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterISS2ISS2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS2IUS2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUC2IUC2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelShapeOpeningImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.8164579",
"0.796104",
"0.7792533",
"0.76417816",
"0.74947166",
"0.7469685",
"0.7461249",
"0.745837",
"0.7394439",
"0.73799187",
"0.7362876",
"0.735769",
"0.7347056",
"0.73209006",
"0.73039687",
"0.7292726",
"0.72827333",
"0.72694397",
"0.7240936",
"0.7170967",
"0.7154845",
"0.7142686",
"0.713682",
"0.7132094",
"0.7106825",
"0.71029645",
"0.7097051",
"0.70945853",
"0.70834047",
"0.7050367",
"0.70498383",
"0.7042348",
"0.7041109",
"0.7004414",
"0.6996226",
"0.69948304",
"0.6992843",
"0.69918644",
"0.6974554",
"0.6970821",
"0.69669527",
"0.6965708",
"0.6950734",
"0.694602",
"0.69409937",
"0.6936093",
"0.6912252",
"0.69118595",
"0.6896375",
"0.6879695",
"0.686943",
"0.6866021",
"0.6862645",
"0.6859999",
"0.6852015",
"0.68422985",
"0.6837193",
"0.6835212",
"0.67903703",
"0.6783067",
"0.67748547",
"0.6763816",
"0.6755442",
"0.6753086",
"0.67487466",
"0.6746463",
"0.67436355",
"0.6741867",
"0.6731391",
"0.67293423",
"0.6713459",
"0.6710439",
"0.6705679",
"0.67027277",
"0.6702006",
"0.6700425",
"0.669443",
"0.6686239",
"0.668174",
"0.66767377",
"0.6672747",
"0.6665471",
"0.6660504",
"0.66555256",
"0.664691",
"0.66468805",
"0.6643444",
"0.6629656",
"0.6610186",
"0.6609489",
"0.66062",
"0.6599953",
"0.65927464",
"0.6578505",
"0.6578059",
"0.6577931",
"0.6574155",
"0.65702224",
"0.6563454",
"0.6561587"
] | 0.8288121 | 0 |
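The New() docstring in the record above describes ITK's Python object-factory convention: positional arguments fill the filter inputs in order, while keyword arguments are routed to the matching Set* methods. The sketch below exercises that convention; it is illustrative only, assuming a reasonably recent ITK Python wrapping (image size, pixel values, and the helper name make_constant_image are made up for the example).

import itk

ImageType = itk.Image[itk.F, 2]

def make_constant_image(value):
    # Illustrative helper: a 4x4 float image filled with a constant.
    image = ImageType.New()
    image.SetRegions([4, 4])
    image.Allocate()
    image.FillBuffer(value)
    return image

a, b, c = make_constant_image(1.0), make_constant_image(2.0), make_constant_image(3.0)

FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

# Non-named parameters are assigned to the inputs in order (SetInput1..3)...
adder = FilterType.New(a, b, c)

# ...and named parameters call the method of the same name prefixed by 'Set',
# so this form is equivalent:
adder = FilterType.New(Input1=a, Input2=b, Input3=c)

adder.Update()
output = adder.GetOutput()  # every pixel is 1.0 + 2.0 + 3.0 = 6.0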
itkTernaryAddImageFilterIF2IF2IF2IF2_cast(itkLightObject obj) -> itkTernaryAddImageFilterIF2IF2IF2IF2 | def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkSubtractImageFilterIF2IF2IF2_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_cast(*args)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkBinaryContourImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_cast(*args)",
"def itkLabelStatisticsImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS2ISS2ISS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS2ISS2ISS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkBinaryGrindPeakImageFilterISS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC2IUC2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC2IUC2SE2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)"
] | [
"0.8325339",
"0.81298554",
"0.7900186",
"0.789129",
"0.7829227",
"0.7820389",
"0.78154504",
"0.7814893",
"0.7799674",
"0.7781515",
"0.77740836",
"0.7751609",
"0.77474713",
"0.7740967",
"0.77069825",
"0.7687321",
"0.7653217",
"0.7551432",
"0.75447375",
"0.7498614",
"0.74841386",
"0.7444034",
"0.7427243",
"0.74118775",
"0.74117213",
"0.7411362",
"0.74018717",
"0.74007183",
"0.7393822",
"0.7392059",
"0.7387287",
"0.73863703",
"0.7384837",
"0.73761004",
"0.7361443",
"0.7358703",
"0.7345819",
"0.73353446",
"0.7323802",
"0.7319697",
"0.7315256",
"0.73081553",
"0.7298402",
"0.72937584",
"0.72924995",
"0.728923",
"0.7271208",
"0.7267614",
"0.7267042",
"0.7255984",
"0.724925",
"0.7248608",
"0.72453016",
"0.7238084",
"0.7233037",
"0.7231691",
"0.72258145",
"0.72222894",
"0.72174716",
"0.7211547",
"0.72086245",
"0.7192574",
"0.7187997",
"0.7187229",
"0.71865803",
"0.71821123",
"0.718103",
"0.7169083",
"0.7164535",
"0.71288574",
"0.7124888",
"0.71220917",
"0.7111234",
"0.7106584",
"0.70966595",
"0.7096346",
"0.70958686",
"0.7077354",
"0.7070533",
"0.70611393",
"0.7052449",
"0.7039578",
"0.70374054",
"0.70367783",
"0.7036065",
"0.7020758",
"0.701971",
"0.7019302",
"0.7018832",
"0.7012687",
"0.70101696",
"0.70097816",
"0.7007046",
"0.6997733",
"0.69943976",
"0.6993474",
"0.6987534",
"0.6987067",
"0.6985634",
"0.69849044"
] | 0.82992935 | 1 |
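The *_cast helper documented in the record above is the swig-generated static down-cast: it takes a generic itkLightObject and returns a pointer typed as the concrete filter instantiation. A small usage sketch, assuming the same 2-D float instantiation as above (the "untyped registry" scenario is hypothetical):

import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

concrete = FilterType.New()

# Pretend all we kept is an untyped reference, as a generic pipeline
# registry might; statically this is just an itkLightObject.
light_object = concrete

# The wrapped static cast() recovers the concrete type; it maps onto the
# module-level itkTernaryAddImageFilterIF2IF2IF2IF2_cast shown above.
recovered = FilterType.cast(light_object)
assert recovered is not None
recovered.SetInput1(ImageType.New())  # the typed API is available again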
cast(itkLightObject obj) -> itkTernaryAddImageFilterIF3IF3IF3IF3 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterISS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterISS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)"
] | [
"0.81073576",
"0.80122215",
"0.7959463",
"0.78997636",
"0.7873615",
"0.7828453",
"0.7827501",
"0.7825715",
"0.7757871",
"0.7743137",
"0.77404636",
"0.77255476",
"0.7713849",
"0.7702168",
"0.764673",
"0.7637698",
"0.76347834",
"0.7630268",
"0.7629711",
"0.7618248",
"0.7616702",
"0.7615652",
"0.76044214",
"0.7599852",
"0.75908303",
"0.75825596",
"0.7575792",
"0.7567679",
"0.75657547",
"0.7560849",
"0.75224346",
"0.75114894",
"0.7495",
"0.7487061",
"0.7444267",
"0.74125016",
"0.7400533",
"0.7398794",
"0.7398389",
"0.7393008",
"0.73590523",
"0.7357347",
"0.7340954",
"0.7340626",
"0.7330342",
"0.7314639",
"0.7301703",
"0.72929275",
"0.72717506",
"0.7267301",
"0.72594845",
"0.7255204",
"0.72388226",
"0.7223932",
"0.72216874",
"0.7220349",
"0.7207416",
"0.71987975",
"0.71984816",
"0.7174852",
"0.7172126",
"0.7160937",
"0.7156928",
"0.71554166",
"0.7150386",
"0.7147694",
"0.71435684",
"0.7135912",
"0.7129853",
"0.71240485",
"0.7108838",
"0.7107222",
"0.71069515",
"0.7093807",
"0.7090841",
"0.7080017",
"0.70764977",
"0.7075556",
"0.70668083",
"0.70640457",
"0.7061455",
"0.7061285",
"0.7058356",
"0.705735",
"0.70486355",
"0.7046976",
"0.7042452",
"0.70417815",
"0.70362353",
"0.7028816",
"0.70204306",
"0.7018831",
"0.7018108",
"0.70134586",
"0.7012067",
"0.70101887",
"0.6999308",
"0.698969",
"0.6984446",
"0.6983546"
] | 0.74116236 | 36 |
New() > itkTernaryAddImageFilterIF3IF3IF3IF3 Create a new object of the class itkTernaryAddImageFilterIF3IF3IF3IF3 and set the inputs and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, the second to the second input, etc. The named parameters are applied by calling the method of the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID3ID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF3IF3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS3ISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS3IUS3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterISS3ISS3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUC3IUC3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81593454",
"0.8019483",
"0.77812564",
"0.7755564",
"0.76374006",
"0.7600967",
"0.7552205",
"0.75319076",
"0.75210726",
"0.75069284",
"0.7500817",
"0.74600273",
"0.7445691",
"0.7439426",
"0.74359894",
"0.7410714",
"0.7394769",
"0.7375656",
"0.73547715",
"0.73338944",
"0.7309137",
"0.7308926",
"0.7308153",
"0.73044044",
"0.7296827",
"0.72965664",
"0.7272183",
"0.72698",
"0.7251409",
"0.7247766",
"0.7213897",
"0.72101766",
"0.7206172",
"0.7200399",
"0.71874034",
"0.71724975",
"0.7168517",
"0.71574616",
"0.71271783",
"0.7123028",
"0.71164954",
"0.7109218",
"0.7100107",
"0.7088822",
"0.70731014",
"0.7072046",
"0.70705444",
"0.7069467",
"0.7058877",
"0.70385194",
"0.7027688",
"0.7026507",
"0.7024603",
"0.7005804",
"0.69932973",
"0.6993251",
"0.6987232",
"0.6970017",
"0.6962639",
"0.6948045",
"0.69480145",
"0.6939465",
"0.6933917",
"0.6925334",
"0.69006103",
"0.6899851",
"0.689017",
"0.68862075",
"0.6865627",
"0.68652135",
"0.68549585",
"0.68366516",
"0.68363196",
"0.6828748",
"0.6827118",
"0.6808224",
"0.6806314",
"0.6789765",
"0.6787468",
"0.67785954",
"0.67745316",
"0.6772061",
"0.67641574",
"0.67542624",
"0.67446053",
"0.67345935",
"0.6733767",
"0.67210615",
"0.67207956",
"0.6719726",
"0.6718016",
"0.67127067",
"0.670944",
"0.6708906",
"0.6707384",
"0.6705261",
"0.6704019",
"0.67031676",
"0.6701461",
"0.67005044"
] | 0.83370554 | 0 |
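A minimal usage sketch (not part of the dataset) of the New() convention described by the query in the record above: non-named arguments are assigned to the filter inputs in order, and named arguments are forwarded to the matching Set-prefixed methods. All names below are illustrative and assume the standard `itk` Python package built with TernaryAddImageFilter wrapped; the input images are empty placeholders, so no Update() is performed.

    import itk

    ImageType = itk.Image[itk.F, 3]
    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType,
                                           ImageType, ImageType]

    image1 = ImageType.New()
    image2 = ImageType.New()
    image3 = ImageType.New()

    # Non-named parameters: the first argument goes to the first input
    # (SetInput1), the second to SetInput2, and so on.
    add_positional = FilterType.New(image1, image2, image3)

    # Named parameters: Input1=... is applied by calling SetInput1(...),
    # i.e. the keyword name prefixed with 'Set'.
    add_named = FilterType.New(Input1=image1, Input2=image2, Input3=image3)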
itkTernaryAddImageFilterIF3IF3IF3IF3_cast(itkLightObject obj) > itkTernaryAddImageFilterIF3IF3IF3IF3 | def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def itkSpeckleNoiseImageFilterIF3IF3_cast(*args):\n return _itkSpeckleNoiseImageFilterPython.itkSpeckleNoiseImageFilterIF3IF3_cast(*args)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)"
] | [
"0.8314334",
"0.8146819",
"0.81391484",
"0.8064029",
"0.8047938",
"0.80301446",
"0.8027858",
"0.80160296",
"0.8015273",
"0.7977002",
"0.79115695",
"0.7899026",
"0.7853632",
"0.78422785",
"0.78316253",
"0.78305656",
"0.78131515",
"0.7811875",
"0.7801812",
"0.77994573",
"0.7799187",
"0.7798523",
"0.7791438",
"0.779052",
"0.7758929",
"0.7754176",
"0.7751458",
"0.7724332",
"0.77170706",
"0.7716697",
"0.7693164",
"0.76809776",
"0.75991035",
"0.7585828",
"0.7577839",
"0.75757647",
"0.7575525",
"0.7539066",
"0.7520594",
"0.7503685",
"0.74971956",
"0.749409",
"0.74892646",
"0.74885696",
"0.74698603",
"0.74580723",
"0.7447573",
"0.74468046",
"0.7446476",
"0.74400526",
"0.74364775",
"0.7429364",
"0.7420915",
"0.74128425",
"0.73949605",
"0.7388024",
"0.7381447",
"0.7347345",
"0.7347232",
"0.7345982",
"0.73386824",
"0.7334201",
"0.73261905",
"0.7310171",
"0.7301234",
"0.7298117",
"0.7294099",
"0.72829443",
"0.7275311",
"0.7267214",
"0.72622365",
"0.7260174",
"0.72588724",
"0.72586495",
"0.725332",
"0.72524095",
"0.72501874",
"0.724879",
"0.72428405",
"0.72358626",
"0.7232067",
"0.72167987",
"0.72162634",
"0.7212631",
"0.71980083",
"0.7191569",
"0.71869665",
"0.7172625",
"0.7168623",
"0.7153479",
"0.7143695",
"0.71364814",
"0.7127574",
"0.7125861",
"0.71191746",
"0.7117529",
"0.7110544",
"0.7107414",
"0.7105062",
"0.7100203"
] | 0.83667874 | 0 |
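A short hedged sketch of the cast() signature documented in the record above: it takes a generic itkLightObject handle and returns it typed as the concrete filter class, mirroring a C++ dynamic_cast. Same assumptions as the sketch above (standard `itk` package, TernaryAddImageFilter wrapped).

    import itk

    ImageType = itk.Image[itk.F, 3]
    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType,
                                           ImageType, ImageType]

    f = FilterType.New()
    # Recover the concrete type from a base-class handle; the cast succeeds
    # only when the object's dynamic type really is FilterType.
    same_f = FilterType.cast(f)
    assert same_f is not None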
cast(itkLightObject obj) > itkTernaryAddImageFilterIUC2IUC2IUC2IUC2 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)"
] | [
"0.78606176",
"0.77856857",
"0.7734099",
"0.7726301",
"0.7724931",
"0.7701192",
"0.764721",
"0.7644028",
"0.7607412",
"0.7591791",
"0.75696933",
"0.75436753",
"0.7526074",
"0.75034356",
"0.75017846",
"0.7489283",
"0.7467527",
"0.74643415",
"0.74634385",
"0.74435395",
"0.74328667",
"0.74318475",
"0.74309736",
"0.74261254",
"0.741429",
"0.7409343",
"0.74069965",
"0.7395981",
"0.7393146",
"0.7384891",
"0.735888",
"0.7354098",
"0.73536086",
"0.7346661",
"0.73443294",
"0.734284",
"0.7339023",
"0.732766",
"0.7316886",
"0.73093164",
"0.72859395",
"0.72800064",
"0.7279866",
"0.72788393",
"0.72654885",
"0.72620934",
"0.7260285",
"0.72551364",
"0.72531104",
"0.7251299",
"0.7250782",
"0.7241258",
"0.7231552",
"0.72300875",
"0.72184193",
"0.7196654",
"0.7185909",
"0.7183118",
"0.71827435",
"0.71775204",
"0.7176795",
"0.71758527",
"0.7175216",
"0.7164795",
"0.7163413",
"0.7159153",
"0.7158397",
"0.7156658",
"0.71559894",
"0.7150628",
"0.7142653",
"0.71367776",
"0.7128944",
"0.7121203",
"0.711502",
"0.7109604",
"0.7109463",
"0.710889",
"0.7102213",
"0.70928067",
"0.7089787",
"0.7080706",
"0.7073202",
"0.7071919",
"0.70649683",
"0.706483",
"0.70546645",
"0.7043858",
"0.70402455",
"0.7032236",
"0.702571",
"0.70246136",
"0.7015435",
"0.701298",
"0.7012117",
"0.7011558",
"0.70115405",
"0.70092064",
"0.7007658",
"0.6994884"
] | 0.71642035 | 64 |
New() -> itkTernaryAddImageFilterIUC2IUC2IUC2IUC2 Create a new object of the class itkTernaryAddImageFilterIUC2IUC2IUC2IUC2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUC2IUC2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageToRunLengthFeaturesFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkPeakSignalToNoiseRatioCalculatorIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelShapeOpeningImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.80834025",
"0.79318345",
"0.7846418",
"0.7789117",
"0.7781465",
"0.76959866",
"0.7689662",
"0.7657546",
"0.76215744",
"0.7617233",
"0.76065916",
"0.75819594",
"0.7497899",
"0.7480199",
"0.7466372",
"0.7465572",
"0.7454194",
"0.74479383",
"0.74334204",
"0.7425108",
"0.74227524",
"0.74216574",
"0.74011093",
"0.73974645",
"0.7395684",
"0.7388915",
"0.7365073",
"0.73386604",
"0.73286253",
"0.73279756",
"0.7321265",
"0.7311295",
"0.73003364",
"0.7284313",
"0.72834927",
"0.7278828",
"0.72597975",
"0.7258461",
"0.725689",
"0.72358805",
"0.7197532",
"0.71970266",
"0.71935886",
"0.7189022",
"0.7180814",
"0.7180519",
"0.7178658",
"0.7124568",
"0.7112072",
"0.71114004",
"0.7098755",
"0.70984244",
"0.70948905",
"0.70866627",
"0.7079036",
"0.7069102",
"0.70687056",
"0.70658964",
"0.7055367",
"0.7054349",
"0.7047952",
"0.70414317",
"0.7036108",
"0.70351946",
"0.7027111",
"0.7026962",
"0.7006632",
"0.7005742",
"0.700524",
"0.69914496",
"0.6985727",
"0.6981789",
"0.69801027",
"0.69666326",
"0.6964613",
"0.6962286",
"0.6959792",
"0.6958457",
"0.69542176",
"0.6934637",
"0.6928677",
"0.692845",
"0.691969",
"0.69153976",
"0.6910176",
"0.69035786",
"0.6887882",
"0.6884784",
"0.68765855",
"0.68748",
"0.6869459",
"0.6865101",
"0.686326",
"0.68477386",
"0.68408394",
"0.6838985",
"0.6831174",
"0.6827025",
"0.6825762",
"0.68245506"
] | 0.82164854 | 0 |
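The record above documents the New() calling convention for the wrapped filter. A minimal usage sketch of that convention follows (assumptions, not taken from this dump: the itk Python package is installed, the IUC2 instantiation is wrapped in the build, and the make_image helper plus the 8x8 size are illustrative):

    import itk

    ImageType = itk.Image[itk.UC, 2]

    def make_image(value):
        # Hypothetical helper: allocate a small 8x8 image filled with 'value'.
        image = ImageType.New()
        image.SetRegions([8, 8])
        image.Allocate()
        image.FillBuffer(value)
        return image

    a, b, c = make_image(1), make_image(2), make_image(3)

    # Non-named arguments are assigned to the filter inputs in order;
    # named arguments call the matching 'Set'-prefixed method instead.
    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]
    add = FilterType.New(a, b, c)
    add.Update()
    out = add.GetOutput()  # every pixel holds 1 + 2 + 3 = 6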
itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC2IUC2IUC2IUC2 | def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)"
] | [
"0.7892496",
"0.78112376",
"0.77925205",
"0.77598757",
"0.77170867",
"0.76765",
"0.764932",
"0.7625104",
"0.7615173",
"0.7612952",
"0.7601345",
"0.7593909",
"0.75822866",
"0.7575358",
"0.7571743",
"0.7553365",
"0.75141734",
"0.74964494",
"0.7489112",
"0.7484641",
"0.7465378",
"0.7461833",
"0.7454941",
"0.7440599",
"0.74391294",
"0.74350655",
"0.7432016",
"0.74281514",
"0.74166363",
"0.7403122",
"0.7399751",
"0.7373487",
"0.7366462",
"0.7364078",
"0.735911",
"0.73562956",
"0.7350446",
"0.7349883",
"0.73374903",
"0.73356324",
"0.7323264",
"0.7313239",
"0.7277515",
"0.72677016",
"0.72672445",
"0.7260342",
"0.72589827",
"0.72498256",
"0.72495544",
"0.7232879",
"0.72326183",
"0.72145975",
"0.72044396",
"0.7204164",
"0.7202962",
"0.7177709",
"0.7167207",
"0.7159295",
"0.7158796",
"0.715723",
"0.7152748",
"0.71511334",
"0.71479446",
"0.71428627",
"0.7131654",
"0.71268487",
"0.7121917",
"0.7109887",
"0.7107595",
"0.7104852",
"0.7097879",
"0.7091903",
"0.70843357",
"0.70822084",
"0.7078414",
"0.7072232",
"0.70714366",
"0.7068975",
"0.7058581",
"0.70535547",
"0.70500374",
"0.7049906",
"0.7042649",
"0.7036417",
"0.70234543",
"0.7017131",
"0.7007216",
"0.69923323",
"0.6992151",
"0.69901574",
"0.6984286",
"0.69811994",
"0.6981102",
"0.6970071",
"0.6968539",
"0.69663334",
"0.69652337",
"0.6965078",
"0.6956621",
"0.6954461"
] | 0.7989687 | 0 |
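The record above covers the free-function form of the cast helper; in Python it is usually reached through the class-level cast() static method. A minimal sketch under stated assumptions (the IUC2 instantiation is wrapped in the installed ITK build; the exact failure behavior of a bad cast can vary by ITK version):

    import itk

    UC2 = itk.Image[itk.UC, 2]
    FilterType = itk.TernaryAddImageFilter[UC2, UC2, UC2, UC2]

    f = FilterType.New()

    # cast() performs a checked downcast from a generic itkLightObject
    # handle back to the concrete wrapped type, like dynamic_cast in C++.
    same = FilterType.cast(f)

    # Casting an unrelated object does not produce a usable filter; in the
    # builds observed it yields a null (None) handle.
    bad = FilterType.cast(itk.Object.New())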
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)"
] | [
"0.77693975",
"0.7703127",
"0.76897246",
"0.76749665",
"0.76679075",
"0.7648735",
"0.7646133",
"0.75900036",
"0.7586561",
"0.7576648",
"0.7568216",
"0.75426763",
"0.7539724",
"0.7532106",
"0.7513516",
"0.74965817",
"0.7474853",
"0.7471096",
"0.74622446",
"0.7437819",
"0.74244404",
"0.7414886",
"0.7412749",
"0.74103",
"0.7403834",
"0.74011654",
"0.7399833",
"0.7397393",
"0.7395434",
"0.73926723",
"0.7391948",
"0.73847777",
"0.7380798",
"0.7365211",
"0.7364548",
"0.7356582",
"0.7355647",
"0.7345971",
"0.7339377",
"0.73384094",
"0.7317732",
"0.731097",
"0.7299428",
"0.7298134",
"0.72873944",
"0.7286428",
"0.7265992",
"0.72581494",
"0.7255384",
"0.7251413",
"0.7245706",
"0.7238003",
"0.7232991",
"0.7230324",
"0.72225434",
"0.7219765",
"0.72171766",
"0.72114414",
"0.72082734",
"0.7208033",
"0.72069514",
"0.72062194",
"0.72036767",
"0.7203282",
"0.72017044",
"0.7198469",
"0.7184015",
"0.71835244",
"0.71813697",
"0.7181174",
"0.7166037",
"0.71651816",
"0.71646595",
"0.71604156",
"0.7150072",
"0.7143285",
"0.7136067",
"0.7132849",
"0.712965",
"0.71102434",
"0.71072054",
"0.7106346",
"0.710061",
"0.70988154",
"0.70954293",
"0.70946974",
"0.7091442",
"0.7091132",
"0.70905113",
"0.70696235",
"0.7068046",
"0.7065262",
"0.70635223",
"0.7063261",
"0.70581293",
"0.7054291",
"0.7054155",
"0.70520365",
"0.7049452",
"0.70485544"
] | 0.71232 | 79 |
New() -> itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 Create a new object of the class itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. (A usage sketch follows this record.) | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUC3IUC3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81388545",
"0.810595",
"0.8090037",
"0.7969986",
"0.7956292",
"0.78857464",
"0.785059",
"0.77959174",
"0.77761877",
"0.7631814",
"0.75843716",
"0.7583665",
"0.7542179",
"0.75403064",
"0.7498079",
"0.74783415",
"0.74772173",
"0.74764365",
"0.7475445",
"0.7461655",
"0.7461627",
"0.7452697",
"0.74179375",
"0.74173796",
"0.74080294",
"0.7403118",
"0.7399328",
"0.73914623",
"0.7391003",
"0.73779005",
"0.7375026",
"0.7369553",
"0.73658526",
"0.7365635",
"0.73522466",
"0.7349414",
"0.7349228",
"0.7346489",
"0.7339408",
"0.72976065",
"0.7290865",
"0.7284937",
"0.72780603",
"0.7269416",
"0.7259199",
"0.72529507",
"0.7239068",
"0.7236154",
"0.7235399",
"0.7233468",
"0.72316086",
"0.7224426",
"0.72176945",
"0.7214632",
"0.7208014",
"0.7199313",
"0.719411",
"0.7193506",
"0.71875584",
"0.7178673",
"0.71731156",
"0.7169",
"0.7165535",
"0.7153551",
"0.7153157",
"0.71508044",
"0.7140974",
"0.7133741",
"0.71091515",
"0.7101385",
"0.70978326",
"0.7096911",
"0.709474",
"0.70905685",
"0.7085851",
"0.70757383",
"0.70651615",
"0.7064959",
"0.7061307",
"0.7061222",
"0.7060728",
"0.7057416",
"0.7057057",
"0.705392",
"0.7050168",
"0.70493335",
"0.70467985",
"0.7040717",
"0.70306975",
"0.7028926",
"0.7014248",
"0.69956744",
"0.6989241",
"0.6972773",
"0.69672084",
"0.6960314",
"0.69601876",
"0.69574344",
"0.6946318",
"0.69328964"
] | 0.82122886 | 0 |
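
The record above documents ITK's generated New() factory: positional arguments are wired to the filter inputs in order, and a keyword argument Foo=x is forwarded to the SetFoo(x) method. The following is a minimal usage sketch, not part of the dataset, assuming the `itk` Python package with TernaryAddImageFilter wrapped (as in the 4.x-era bindings these records come from) plus numpy; array sizes and contents are illustrative only.

import itk
import numpy as np

arr = np.ones((4, 4, 4), dtype=np.uint8)       # IUC3: unsigned char, 3-D
img1 = itk.GetImageFromArray(arr)
img2 = itk.GetImageFromArray(arr * 2)
img3 = itk.GetImageFromArray(arr * 3)

ImageType = itk.Image[itk.UC, 3]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

# Non-named parameters are assigned to the inputs in order
# (img1 -> input 1, img2 -> input 2, img3 -> input 3).
add = FilterType.New(img1, img2, img3)
add.Update()
out = add.GetOutput()                          # every voxel is 1 + 2 + 3 = 6

The same call could instead pass the inputs as named parameters, e.g. FilterType.New(Input1=img1, Input2=img2, Input3=img3), since New() maps each keyword to the corresponding Set method.
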
itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 (a usage sketch follows this record) | def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def itkLabelStatisticsImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIUC3IUC3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC3IUC3SE3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)"
] | [
"0.7892304",
"0.78337157",
"0.78077155",
"0.7804674",
"0.77679163",
"0.7764134",
"0.7759513",
"0.7749586",
"0.7723623",
"0.7722778",
"0.77218026",
"0.7688984",
"0.7677636",
"0.76723707",
"0.76719034",
"0.76536036",
"0.7652875",
"0.7634969",
"0.76083463",
"0.7607304",
"0.7603553",
"0.76028025",
"0.7595124",
"0.75915504",
"0.7586499",
"0.7568826",
"0.7559071",
"0.755819",
"0.75473166",
"0.75273514",
"0.75236416",
"0.7520576",
"0.7513379",
"0.75077784",
"0.7490932",
"0.7485017",
"0.747257",
"0.7457757",
"0.74547446",
"0.74221396",
"0.74001104",
"0.738859",
"0.7380339",
"0.7380002",
"0.7376764",
"0.7376251",
"0.73746884",
"0.73735344",
"0.7355729",
"0.7340568",
"0.7324074",
"0.7320929",
"0.73191255",
"0.73062766",
"0.73018354",
"0.72987634",
"0.72974455",
"0.72749853",
"0.7271546",
"0.72696036",
"0.72638893",
"0.7259789",
"0.72431505",
"0.7240082",
"0.72396195",
"0.7226184",
"0.72223645",
"0.7202337",
"0.71903336",
"0.718663",
"0.71841663",
"0.7183155",
"0.7180323",
"0.7173323",
"0.7170511",
"0.7167832",
"0.7162349",
"0.716127",
"0.71553874",
"0.7150804",
"0.71504456",
"0.714621",
"0.71364623",
"0.7131022",
"0.71159106",
"0.7113387",
"0.71088445",
"0.7108263",
"0.71064097",
"0.71002245",
"0.7093912",
"0.7091901",
"0.7068374",
"0.70670694",
"0.7045886",
"0.7044672",
"0.7035609",
"0.7018981",
"0.7013456",
"0.7013035"
] | 0.81195134 | 0 |
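
The record above documents the generated static cast() helper: given any itkLightObject it returns a pointer re-typed as the concrete wrapped filter (a null pointer when the dynamic type does not match), which is how a base-class handle is turned back into a usable filter. A deliberately degenerate but self-contained sketch, not part of the dataset, under the same assumption that TernaryAddImageFilter is wrapped:

import itk

ImageType = itk.Image[itk.UC, 3]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

filt = FilterType.New()

# cast() is a static method on the wrapped class; here we merely "recover"
# the object we already had, but the same call works on any itkLightObject
# handed back through a base-class API.
recovered = FilterType.cast(filt)
print(recovered.GetNameOfClass())              # TernaryAddImageFilter
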
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL2IUL2IUL2IUL2 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkIsoDataThresholdCalculatorHFF *\":\n return _itkIsoDataThresholdCalculatorPython.itkIsoDataThresholdCalculatorHFF_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)"
] | [
"0.7591891",
"0.74914503",
"0.7475746",
"0.7469472",
"0.7456874",
"0.73987305",
"0.7355603",
"0.73495823",
"0.7347236",
"0.7315157",
"0.7291294",
"0.7271386",
"0.72556365",
"0.7250248",
"0.7243723",
"0.72239333",
"0.72214377",
"0.7204429",
"0.71994066",
"0.7191831",
"0.7185622",
"0.71828616",
"0.71825826",
"0.7160285",
"0.7158332",
"0.7154531",
"0.71477926",
"0.7145402",
"0.7137166",
"0.7136279",
"0.71323633",
"0.7130802",
"0.7121939",
"0.71164364",
"0.7109909",
"0.7099747",
"0.7094512",
"0.709448",
"0.7088279",
"0.7082735",
"0.7078342",
"0.7077806",
"0.70751023",
"0.7074672",
"0.70741177",
"0.7069883",
"0.7060261",
"0.7051036",
"0.70354825",
"0.70284045",
"0.70273775",
"0.70261747",
"0.70158637",
"0.7015333",
"0.7012859",
"0.6992649",
"0.69905186",
"0.6985864",
"0.69840795",
"0.6982791",
"0.69825083",
"0.69777834",
"0.6968144",
"0.696225",
"0.6958619",
"0.6952146",
"0.6950971",
"0.6947995",
"0.6942616",
"0.6942526",
"0.693422",
"0.6931558",
"0.6930823",
"0.6927897",
"0.6921547",
"0.6918738",
"0.6909767",
"0.6906974",
"0.6906221",
"0.6903495",
"0.6903491",
"0.6898145",
"0.68943197",
"0.6892537",
"0.6892264",
"0.6890578",
"0.6890223",
"0.6886439",
"0.6885871",
"0.68827736",
"0.6881961",
"0.68786246",
"0.6867004",
"0.68550414",
"0.68540454",
"0.68524534",
"0.68476295",
"0.684368",
"0.6840647",
"0.6838915"
] | 0.6999903 | 55 |
New() -> itkTernaryAddImageFilterIUL2IUL2IUL2IUL2 Create a new object of the class itkTernaryAddImageFilterIUL2IUL2IUL2IUL2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
    obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()
    import itkTemplate
    itkTemplate.New(obj, *args, **kargs)
    return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS2ISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.7961385",
"0.7959622",
"0.79339135",
"0.7845965",
"0.7836587",
"0.78198117",
"0.7744429",
"0.765472",
"0.7626084",
"0.75983095",
"0.75397074",
"0.7475012",
"0.73143613",
"0.7257502",
"0.72472817",
"0.72370017",
"0.7167874",
"0.71628785",
"0.7112177",
"0.71041477",
"0.70785636",
"0.7064429",
"0.7063965",
"0.7046232",
"0.70395935",
"0.7029957",
"0.70141953",
"0.70115495",
"0.70110214",
"0.70063776",
"0.70027786",
"0.69956875",
"0.69940853",
"0.6989268",
"0.6979971",
"0.6977805",
"0.69524515",
"0.69444615",
"0.6939566",
"0.6937508",
"0.69352764",
"0.69250226",
"0.6918345",
"0.6907365",
"0.6904202",
"0.69006413",
"0.68983394",
"0.6896872",
"0.68797326",
"0.6879361",
"0.68739754",
"0.6873023",
"0.68682945",
"0.685096",
"0.6850064",
"0.6848894",
"0.684139",
"0.68289053",
"0.6828104",
"0.68256384",
"0.6803361",
"0.68020004",
"0.6797263",
"0.6785548",
"0.67800796",
"0.6762626",
"0.67560816",
"0.67540246",
"0.67538273",
"0.67525697",
"0.6748974",
"0.674496",
"0.6717871",
"0.6715672",
"0.6713055",
"0.6694788",
"0.66916054",
"0.6688186",
"0.66721123",
"0.6668504",
"0.66655314",
"0.66567975",
"0.6645413",
"0.66374415",
"0.6635351",
"0.6618153",
"0.661168",
"0.66058683",
"0.6597111",
"0.6595037",
"0.65840405",
"0.658048",
"0.65799195",
"0.65794057",
"0.6578397",
"0.65755844",
"0.65704054",
"0.656996",
"0.6565211",
"0.65622914"
] | 0.81316435 | 0 |
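
The record above documents ITK's generic `New()` factory convention: positional arguments fill the filter inputs in order, and each keyword argument `X=value` is dispatched to the matching `SetX(value)` method. A minimal usage sketch follows, assuming a hypothetical ITK Python build that wraps the `IUL2` (2-D unsigned long) instantiation of `TernaryAddImageFilter`; the `InPlace` keyword is illustrative only:

```python
# Sketch of the New() convention described in the record above.
# Assumes `import itk` works and that this build wraps the IUL2
# instantiation -- both are assumptions, not verified facts.
import itk

ImageType = itk.Image[itk.UL, 2]                      # 2-D unsigned-long image
img1, img2, img3 = (ImageType.New() for _ in range(3))

FilterType = itk.TernaryAddImageFilter[ImageType, ImageType,
                                       ImageType, ImageType]

# Positional args map to inputs in order (img1 -> first input, ...);
# the keyword InPlace=False is routed to SetInPlace(False).
add_filter = FilterType.New(img1, img2, img3, InPlace=False)
```
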
itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL2IUL2IUL2IUL2 | def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):
    return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF2 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS2ISS2ISS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS2ISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)"
] | [
"0.7352389",
"0.7324347",
"0.7310399",
"0.7304036",
"0.72892624",
"0.7284477",
"0.72233516",
"0.7186214",
"0.7185038",
"0.7158732",
"0.71572274",
"0.7150702",
"0.7124124",
"0.7089004",
"0.7087673",
"0.7077956",
"0.70659965",
"0.7064473",
"0.7033919",
"0.70215225",
"0.70097744",
"0.70002913",
"0.6992403",
"0.69826216",
"0.69639754",
"0.6947931",
"0.694189",
"0.69389534",
"0.69347817",
"0.693306",
"0.6924707",
"0.69244456",
"0.69161093",
"0.69142467",
"0.6904631",
"0.6872454",
"0.68719447",
"0.68702734",
"0.68652725",
"0.68625605",
"0.6858912",
"0.68579024",
"0.6854627",
"0.683706",
"0.68363583",
"0.6832669",
"0.68205804",
"0.68148303",
"0.6814561",
"0.6789761",
"0.6787951",
"0.67861295",
"0.67799574",
"0.6778308",
"0.6774148",
"0.6773796",
"0.6770348",
"0.6769595",
"0.67685586",
"0.67675",
"0.6756416",
"0.67547923",
"0.67516315",
"0.67484576",
"0.67465526",
"0.6743744",
"0.6739903",
"0.6735742",
"0.6727871",
"0.6720124",
"0.6717643",
"0.6716549",
"0.67119753",
"0.6710653",
"0.6701041",
"0.6699662",
"0.669883",
"0.66977364",
"0.6692957",
"0.66918546",
"0.66786206",
"0.66713935",
"0.6669512",
"0.6666049",
"0.6658995",
"0.6656818",
"0.6653015",
"0.6643115",
"0.66413003",
"0.6641001",
"0.66404265",
"0.66365093",
"0.66316634",
"0.66270703",
"0.6622368",
"0.66220707",
"0.66195923",
"0.6614067",
"0.6613207",
"0.66089493"
] | 0.7681724 | 0 |
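
The `cast()` records above and below wrap the same SWIG-generated downcasting helper: given a generic `itkLightObject` reference, it returns the object re-typed as the concrete filter (or `None` when the runtime type does not match). A short sketch under the same assumed build as the previous example:

```python
# Sketch of the cast() helper documented in these records. cast() accepts
# any itkLightObject; it is typically used to recover the concrete type
# from a base-class reference returned by a generic API. Build-specific
# type names below are assumptions.
import itk

ImageType = itk.Image[itk.UL, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType,
                                       ImageType, ImageType]

obj = FilterType.New()          # already concrete, but also an itkLightObject
recovered = FilterType.cast(obj)
assert recovered is not None    # cast succeeded; same object, concrete typing
```
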
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 | def cast(*args):
    return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterISS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)"
] | [
"0.7647566",
"0.7561853",
"0.7548788",
"0.753728",
"0.75245917",
"0.7512707",
"0.74969774",
"0.7495816",
"0.7483497",
"0.74777776",
"0.7451763",
"0.74383324",
"0.7436231",
"0.7423924",
"0.7415188",
"0.7409509",
"0.7404221",
"0.73695934",
"0.7364972",
"0.7341425",
"0.73326707",
"0.7324773",
"0.7305539",
"0.7287633",
"0.72739106",
"0.72568303",
"0.7253272",
"0.72455406",
"0.7245243",
"0.72432286",
"0.72427076",
"0.7239507",
"0.72329384",
"0.7227554",
"0.72053397",
"0.72042036",
"0.71989965",
"0.7197914",
"0.7197814",
"0.7192943",
"0.7189492",
"0.7182822",
"0.71683705",
"0.71585196",
"0.715355",
"0.7151498",
"0.7144038",
"0.71407497",
"0.71371734",
"0.7133431",
"0.71138126",
"0.71040434",
"0.70902246",
"0.7087941",
"0.7082793",
"0.70787674",
"0.7076537",
"0.7075294",
"0.7072881",
"0.7069373",
"0.7068286",
"0.70577925",
"0.70447016",
"0.7040179",
"0.70394194",
"0.7038129",
"0.70368475",
"0.7036604",
"0.7030339",
"0.70190626",
"0.70144176",
"0.70036626",
"0.700096",
"0.69975555",
"0.69972867",
"0.69956845",
"0.69948435",
"0.6993425",
"0.69919044",
"0.6986614",
"0.69822526",
"0.6981525",
"0.6980356",
"0.69774914",
"0.6973971",
"0.69736767",
"0.69732535",
"0.69723076",
"0.6969886",
"0.69609356",
"0.69597214",
"0.695857",
"0.6946633",
"0.69435227",
"0.69404644",
"0.69394755",
"0.69378203",
"0.69338304",
"0.6929201",
"0.6927426"
] | 0.70471674 | 62 |
New() -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 Create a new object of the class itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter becomes the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID3ID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionBaseIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS3ISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShapePriorMAPCostFunctionIF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81462044",
"0.8110021",
"0.7995434",
"0.7912326",
"0.79099345",
"0.787577",
"0.77468467",
"0.7723048",
"0.77158433",
"0.7480538",
"0.74527663",
"0.73376215",
"0.7280301",
"0.7275997",
"0.7273901",
"0.7256137",
"0.723621",
"0.72328305",
"0.7226152",
"0.7203478",
"0.7197948",
"0.71940047",
"0.7193793",
"0.7184354",
"0.7168656",
"0.7158746",
"0.7146881",
"0.7145578",
"0.71421826",
"0.7138644",
"0.71349096",
"0.7126242",
"0.712511",
"0.7117356",
"0.7116754",
"0.7107335",
"0.7098034",
"0.7085674",
"0.7084034",
"0.70687246",
"0.7066657",
"0.7055719",
"0.7052935",
"0.70196164",
"0.70086336",
"0.70031905",
"0.700279",
"0.7001722",
"0.6999823",
"0.6988504",
"0.6984632",
"0.69845986",
"0.69763047",
"0.6975429",
"0.69704926",
"0.6967096",
"0.6962284",
"0.696178",
"0.69580626",
"0.6957185",
"0.6948909",
"0.6942582",
"0.6928663",
"0.6916321",
"0.69052064",
"0.6903084",
"0.69001716",
"0.6894439",
"0.68854266",
"0.68724537",
"0.6871984",
"0.68470937",
"0.6830546",
"0.6830534",
"0.68298304",
"0.68290603",
"0.68193865",
"0.68182856",
"0.68012196",
"0.67963064",
"0.67933095",
"0.6792514",
"0.67902356",
"0.6789273",
"0.67886734",
"0.6787122",
"0.6784156",
"0.67777085",
"0.6775165",
"0.6774392",
"0.6771302",
"0.67691946",
"0.6765731",
"0.6765037",
"0.6748983",
"0.6733911",
"0.6733595",
"0.6733126",
"0.67227393",
"0.6716315"
] | 0.814467 | 1 |
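As a usage note for the New() factory documented in the row above: positional arguments are wired to the filter inputs in order, while keyword arguments are forwarded to the matching Set-prefixed method. Below is a minimal sketch of both forms for the unsigned-long 3-D instantiation; it assumes the ITK Python wrapping (import itk) is installed and that the IUL3 template instantiation is enabled in your build, which is not true of every binary distribution. The 4x4x4 constant image is purely illustrative.

    import itk

    # Hypothetical constant input image, used only to feed the filter.
    ImageType = itk.Image[itk.UL, 3]
    img = ImageType.New()
    img.SetRegions([4, 4, 4])
    img.Allocate()
    img.FillBuffer(1)

    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

    # Positional form: the first non-named argument becomes the first input, etc.
    add = FilterType.New(img, img, img)

    # Keyword form: Input1=... is translated into a SetInput1(...) call.
    add = FilterType.New(Input1=img, Input2=img, Input3=img)

    add.Update()
    out = add.GetOutput()  # every voxel holds 1 + 1 + 1 = 3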
itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 | def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkLabelStatisticsImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkBinaryGrindPeakImageFilterISS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3ISS3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterISS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF3IF3SE3 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF3IF3SE3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)"
] | [
"0.7748961",
"0.76095223",
"0.7589307",
"0.75607187",
"0.7520681",
"0.7497383",
"0.74870694",
"0.74818844",
"0.7464951",
"0.74644643",
"0.74344575",
"0.74030006",
"0.7387342",
"0.73710954",
"0.7369231",
"0.7366797",
"0.7360392",
"0.73567164",
"0.73386925",
"0.7336549",
"0.7334058",
"0.73039967",
"0.72962046",
"0.729028",
"0.7278138",
"0.7268886",
"0.72635156",
"0.72612345",
"0.7257982",
"0.72538674",
"0.7251342",
"0.7247933",
"0.7247424",
"0.7242147",
"0.71861386",
"0.7180064",
"0.71796095",
"0.7167509",
"0.71528697",
"0.7144289",
"0.7132494",
"0.7103681",
"0.70930994",
"0.708871",
"0.7088672",
"0.70791537",
"0.7072879",
"0.70674235",
"0.706087",
"0.7057453",
"0.705151",
"0.7049118",
"0.7042318",
"0.7022702",
"0.7018225",
"0.70153433",
"0.70075035",
"0.70050204",
"0.700201",
"0.6990087",
"0.69897646",
"0.6989372",
"0.6977316",
"0.69715214",
"0.6966815",
"0.6963852",
"0.6957955",
"0.6957941",
"0.69518477",
"0.6951811",
"0.6950883",
"0.69357115",
"0.69187266",
"0.6909384",
"0.69076157",
"0.6895842",
"0.68928856",
"0.6892268",
"0.6883077",
"0.68762094",
"0.6875685",
"0.6871048",
"0.6870582",
"0.68635243",
"0.6862708",
"0.6862443",
"0.68622714",
"0.6861518",
"0.68596566",
"0.685818",
"0.6840917",
"0.68338466",
"0.68332356",
"0.6833143",
"0.6832748",
"0.6832298",
"0.6824705",
"0.68223137",
"0.6812361",
"0.6811064"
] | 0.78951705 | 0 |
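The *_cast helpers collected in this row wrap a C++ dynamic_cast: given a generic itkLightObject reference, they hand back the same object typed as the concrete wrapped instantiation. A minimal round-trip sketch follows, again assuming the IUL3 instantiation from this row's query is wrapped in the installed ITK build; if the runtime type does not match, the underlying dynamic_cast fails and no concrete object is recovered.

    import itk

    ImageType = itk.Image[itk.UL, 3]
    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

    f = FilterType.New()

    # A SWIG proxy for the filter is-a itkLightObject, so it can travel through
    # APIs that only know the base class (observers, generic containers, ...).
    light_ref = f

    # cast() downcasts back to the concrete instantiation.
    recovered = FilterType.cast(light_ref)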
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS2IUS2IUS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkBinaryContourImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS2IUS2IUS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def itkLabelStatisticsImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2ISS2_cast(obj)"
] | [
"0.795775",
"0.79334676",
"0.79191446",
"0.78629583",
"0.77982414",
"0.7746097",
"0.76591057",
"0.7601464",
"0.7575579",
"0.75715923",
"0.7570197",
"0.75645965",
"0.7556181",
"0.7556168",
"0.75122136",
"0.7493305",
"0.74912035",
"0.74833924",
"0.74627525",
"0.74295926",
"0.7421582",
"0.74178195",
"0.7377396",
"0.73747104",
"0.73746896",
"0.7360849",
"0.73583615",
"0.7322172",
"0.7305273",
"0.7300977",
"0.7292247",
"0.7276381",
"0.7260167",
"0.7249505",
"0.7248499",
"0.7246102",
"0.72017604",
"0.7185524",
"0.7175639",
"0.7168471",
"0.71639985",
"0.71383",
"0.7136998",
"0.71273726",
"0.7122849",
"0.7101316",
"0.70770514",
"0.707387",
"0.7065931",
"0.7063895",
"0.7060046",
"0.70483476",
"0.7047473",
"0.7031946",
"0.7014411",
"0.7009388",
"0.7003153",
"0.69942003",
"0.69806564",
"0.69653213",
"0.6955403",
"0.6953318",
"0.6949488",
"0.6948645",
"0.694758",
"0.6935836",
"0.6921774",
"0.6914875",
"0.6908864",
"0.6899089",
"0.689868",
"0.689842",
"0.6891227",
"0.68726283",
"0.68706876",
"0.68701893",
"0.68623465",
"0.6849633",
"0.6843911",
"0.68400246",
"0.6838738",
"0.6836775",
"0.68322515",
"0.6825839",
"0.6817024",
"0.68153316",
"0.68095356",
"0.6805173",
"0.68001807",
"0.6795088",
"0.6792682",
"0.67878574",
"0.6783434",
"0.67790747",
"0.6774632",
"0.67739236",
"0.67709225",
"0.67561954",
"0.6755764",
"0.67440265"
] | 0.71492976 | 41 |
New() -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2 Create a new object of the class itkTernaryAddImageFilterIUS2IUS2IUS2IUS2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects - the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS2IUS2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageToRunLengthFeaturesFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkPeakSignalToNoiseRatioCalculatorIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS2ISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkContourDirectedMeanDistanceImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsLabelMapFilterLM2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelShapeOpeningImageFilterIUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.80503565",
"0.7489746",
"0.7481182",
"0.74704665",
"0.7466978",
"0.7464551",
"0.74596614",
"0.74402666",
"0.74363726",
"0.7434975",
"0.73753166",
"0.734277",
"0.73379105",
"0.7327973",
"0.7306081",
"0.7305786",
"0.7299731",
"0.7296037",
"0.7261096",
"0.7260247",
"0.7259942",
"0.72414213",
"0.72326684",
"0.7224352",
"0.7211549",
"0.7182855",
"0.7161747",
"0.7136908",
"0.7096544",
"0.7092104",
"0.70904887",
"0.7058917",
"0.70447236",
"0.70246905",
"0.70084304",
"0.6960547",
"0.6898716",
"0.6869549",
"0.68538666",
"0.6853644",
"0.6847559",
"0.6810767",
"0.6800618",
"0.67995495",
"0.6785803",
"0.67576766",
"0.6746092",
"0.6740426",
"0.6737353",
"0.6718763",
"0.67077386",
"0.6705034",
"0.67006904",
"0.66964835",
"0.66870844",
"0.6682333",
"0.6681994",
"0.6668103",
"0.6657333",
"0.6638107",
"0.6637192",
"0.6628931",
"0.66242456",
"0.6619742",
"0.6618072",
"0.66131365",
"0.6612755",
"0.6610287",
"0.6610049",
"0.6607736",
"0.66074353",
"0.6604942",
"0.65996665",
"0.6593047",
"0.6584563",
"0.65728885",
"0.65699774",
"0.6568834",
"0.6562848",
"0.6562133",
"0.655684",
"0.65551066",
"0.6551367",
"0.65510976",
"0.654415",
"0.6543427",
"0.65341693",
"0.65285355",
"0.652471",
"0.652115",
"0.65146166",
"0.6513752",
"0.65063787",
"0.6504927",
"0.6500571",
"0.6492355",
"0.6487302",
"0.64764845",
"0.6467225",
"0.6463884"
] | 0.83108383 | 0 |
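The New() docstring in the record above describes ITK's generic factory convention: positional arguments become the new filter's inputs in order, and keyword arguments are dispatched to the method of the same name prefixed by 'Set'. Below is a minimal sketch of that dispatch in plain Python; FakeFilter and its setters are hypothetical stand-ins, not part of ITK or of this dataset.

class FakeFilter:
    # Minimal object with Set* methods, standing in for an ITK filter.
    def __init__(self):
        self.inputs = {}
        self.threshold = None

    def SetInput(self, index, value):
        self.inputs[index] = value

    def SetThreshold(self, value):
        self.threshold = value

def New(cls, *args, **kwargs):
    # Non-named parameters become the inputs, in order: the first
    # positional argument is input 0, the second is input 1, etc.
    obj = cls()
    for index, value in enumerate(args):
        obj.SetInput(index, value)
    # Named parameters call the method of the same name prefixed by
    # 'Set', so Threshold=10 ends up as obj.SetThreshold(10).
    for name, value in kwargs.items():
        getattr(obj, "Set" + name)(value)
    return obj

f = New(FakeFilter, "image-a", "image-b", Threshold=10)
assert f.inputs == {0: "image-a", 1: "image-b"}
assert f.threshold == 10

The real itkTemplate.New does more than this sketch (for example, resolving a passed upstream filter to its output before wiring it in), which is deliberately omitted here.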
itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2 | def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS2IUS2IUS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS2IUS2IUS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def itkLabelStatisticsImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkLabelStatisticsImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkSubtractImageFilterIUS2IUS2IUS2_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUS2IUS2IUS2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2ISS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2ISS2_cast(obj)"
] | [
"0.8039066",
"0.803752",
"0.799232",
"0.78865457",
"0.7822779",
"0.7739733",
"0.76613253",
"0.7638676",
"0.76206696",
"0.7589836",
"0.75830173",
"0.7563245",
"0.75561917",
"0.7547682",
"0.7529822",
"0.7529683",
"0.75135267",
"0.75091255",
"0.74985117",
"0.74913585",
"0.7485007",
"0.747801",
"0.74627066",
"0.744452",
"0.7440807",
"0.74308836",
"0.74170864",
"0.73783904",
"0.7365111",
"0.73575485",
"0.73466104",
"0.72976154",
"0.727273",
"0.7245342",
"0.7224478",
"0.7223872",
"0.72207105",
"0.7210812",
"0.72074336",
"0.7189749",
"0.7179922",
"0.71742445",
"0.71737766",
"0.7157944",
"0.71538365",
"0.7149934",
"0.7149175",
"0.71449137",
"0.71434003",
"0.714143",
"0.71002704",
"0.7099323",
"0.7098361",
"0.7093099",
"0.7090935",
"0.70742387",
"0.7072173",
"0.70665973",
"0.704919",
"0.70394385",
"0.70359755",
"0.6996125",
"0.69902784",
"0.69808453",
"0.69781554",
"0.69708806",
"0.69653744",
"0.6964901",
"0.6961071",
"0.6953997",
"0.695399",
"0.6950156",
"0.6946174",
"0.69453245",
"0.69448274",
"0.69398683",
"0.6926338",
"0.69208235",
"0.6916459",
"0.68991",
"0.6894533",
"0.68932426",
"0.6890611",
"0.68747985",
"0.68718034",
"0.6864042",
"0.68603003",
"0.6847939",
"0.684698",
"0.68458855",
"0.684093",
"0.6834261",
"0.6817507",
"0.6814791",
"0.68072146",
"0.6807027",
"0.6794422",
"0.67913043",
"0.67899454",
"0.6789317"
] | 0.79412234 | 3 |
cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 | def cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS2IUS2IUS2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS2IUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS2IUS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS2IUS2_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS2_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUS2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS2 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUS2_cast(obj)",
"def itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)"
] | [
"0.7828117",
"0.77500296",
"0.7680378",
"0.76507425",
"0.76472664",
"0.76291966",
"0.7628558",
"0.76244795",
"0.76156825",
"0.76125294",
"0.7602379",
"0.75422204",
"0.75175846",
"0.7516735",
"0.75001353",
"0.7496559",
"0.7451433",
"0.7428584",
"0.74199283",
"0.7396845",
"0.7393191",
"0.7392055",
"0.73317313",
"0.73311055",
"0.7325116",
"0.73200786",
"0.73139393",
"0.7295904",
"0.72846454",
"0.72774386",
"0.72514963",
"0.7247226",
"0.72388077",
"0.7229871",
"0.7216801",
"0.72163695",
"0.7209472",
"0.7206214",
"0.71997267",
"0.71976787",
"0.7188905",
"0.718669",
"0.71577847",
"0.7154878",
"0.7148227",
"0.714381",
"0.7141549",
"0.71397954",
"0.71344346",
"0.7127718",
"0.71153265",
"0.70875466",
"0.7087138",
"0.7086316",
"0.7070626",
"0.7070292",
"0.7063154",
"0.70572776",
"0.70569533",
"0.704267",
"0.7039594",
"0.70329607",
"0.70255727",
"0.7023326",
"0.70215994",
"0.7019849",
"0.701627",
"0.7009291",
"0.70085526",
"0.6990006",
"0.6985998",
"0.69852537",
"0.6982267",
"0.69779617",
"0.69660795",
"0.6965038",
"0.6961569",
"0.69612366",
"0.6960188",
"0.6947833",
"0.69432217",
"0.6941461",
"0.6935015",
"0.69311047",
"0.692955",
"0.69206125",
"0.6914866",
"0.69138706",
"0.6908919",
"0.69015723",
"0.69007546",
"0.68894434",
"0.68810946",
"0.6873794",
"0.68688416",
"0.68569255",
"0.6856682",
"0.6843662",
"0.68427354",
"0.6842202"
] | 0.72114044 | 36 |
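The negatives above are all SWIG-generated `*_cast` helpers that down-cast a generic itkLightObject handle to a concrete templated filter type. A minimal usage sketch, assuming the Python `itk` package is installed and that this particular instantiation (unsigned short, 3-D) is wrapped in your build:

    import itk

    # Build a small 3-D unsigned-short image to feed the filter (sizes are illustrative).
    ImageType = itk.Image[itk.US, 3]
    image = ImageType.New()
    image.SetRegions([8, 8, 8])
    image.Allocate()

    # TernaryAddImageFilter sums three input images voxel-wise.
    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]
    filt = FilterType.New(Input1=image, Input2=image, Input3=image)
    filt.Update()

    # cast() recovers the concrete wrapped type from an object that is only
    # held through a base-class (itkLightObject) pointer.
    concrete = FilterType.cast(filt)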
New() -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 Create a new object of the class itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsImageFilterIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIUS3IUS3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkPeakSignalToNoiseRatioCalculatorIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageToRunLengthFeaturesFilterIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkContourDirectedMeanDistanceImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkLabelStatisticsImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterID3ID3D.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryGrindPeakImageFilterIUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkStatisticsLabelMapFilterLM3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS3ISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81209564",
"0.76765245",
"0.76381725",
"0.7637697",
"0.76059175",
"0.75846446",
"0.75640565",
"0.75020564",
"0.7486624",
"0.747826",
"0.7462136",
"0.7449853",
"0.74443847",
"0.7438893",
"0.7429914",
"0.74282986",
"0.74251896",
"0.74120355",
"0.73986375",
"0.7391535",
"0.7381102",
"0.7380992",
"0.73763245",
"0.7364215",
"0.7332884",
"0.7310022",
"0.72656566",
"0.72583055",
"0.7250104",
"0.72257584",
"0.72214633",
"0.7217943",
"0.72116077",
"0.71607137",
"0.71411306",
"0.70318174",
"0.7013781",
"0.69855356",
"0.69844836",
"0.69628453",
"0.6936746",
"0.69207096",
"0.69106895",
"0.6907101",
"0.69024706",
"0.6889329",
"0.68850464",
"0.68843865",
"0.68817383",
"0.6872652",
"0.6861108",
"0.6858187",
"0.6852822",
"0.6846082",
"0.6842966",
"0.6840489",
"0.6826174",
"0.68239456",
"0.6817657",
"0.6809683",
"0.6809475",
"0.68040544",
"0.6786898",
"0.6785209",
"0.67846555",
"0.6784332",
"0.6778596",
"0.67746234",
"0.6767614",
"0.6764881",
"0.67565507",
"0.6753017",
"0.67370117",
"0.67325205",
"0.6731595",
"0.67248154",
"0.67212063",
"0.67021596",
"0.6695393",
"0.66941893",
"0.66936475",
"0.6693565",
"0.66885906",
"0.66802585",
"0.66782296",
"0.66743696",
"0.66552824",
"0.6649457",
"0.664884",
"0.6643653",
"0.663727",
"0.66355443",
"0.66283685",
"0.6624125",
"0.6614811",
"0.6602824",
"0.6602491",
"0.6594559",
"0.6590862",
"0.65866965"
] | 0.83841634 | 0 |
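A short sketch of the `New()` factory semantics this record's query describes, under the same assumptions as above (the Python `itk` package with this instantiation wrapped): non-named arguments fill the filter inputs in order, and each named argument is forwarded to the method of the same name prefixed by 'Set'.

    import itk

    ImageType = itk.Image[itk.US, 3]
    images = []
    for _ in range(3):
        img = ImageType.New()
        img.SetRegions([4, 4, 4])
        img.Allocate()
        images.append(img)

    FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

    # Positional arguments are assigned to the inputs in order: the first
    # argument to the first input, the second to the second, and so on.
    f1 = FilterType.New(*images)

    # A named argument such as NumberOfThreads=2 calls SetNumberOfThreads(2)
    # (newer ITK releases name this NumberOfWorkUnits instead).
    f2 = FilterType.New(*images, NumberOfThreads=2)
    f2.Update()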
itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 | def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUS3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterISS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3ISS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUS3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3ISS3_cast(obj)",
"def itkLabelStatisticsImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUC3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUS3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUS3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterISS3ISS3ISS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterISS3ISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC3IUS3 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC3IUS3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUS3_cast(obj)",
"def itkBinaryGrindPeakImageFilterIUC3_cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3IUC3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3IUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryGrindPeakImageFilterIUC3 *\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_cast(obj)",
"def itkScalarImageKmeansImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterISS3ISS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterISS3ISS3_cast(obj)"
] | [
"0.80789053",
"0.80319655",
"0.8008976",
"0.79430354",
"0.79271203",
"0.78627837",
"0.78258866",
"0.782384",
"0.781271",
"0.779851",
"0.77923465",
"0.7790801",
"0.7773563",
"0.77652675",
"0.7721575",
"0.770351",
"0.7701595",
"0.7673285",
"0.7649216",
"0.76348764",
"0.7603356",
"0.7602878",
"0.76006067",
"0.7584524",
"0.7565475",
"0.75587696",
"0.75365216",
"0.7534538",
"0.75322545",
"0.7508018",
"0.7485593",
"0.74799764",
"0.747426",
"0.7466759",
"0.74558264",
"0.74360454",
"0.7432187",
"0.7409608",
"0.740929",
"0.7401684",
"0.7391952",
"0.7390347",
"0.73799044",
"0.737861",
"0.7373628",
"0.73704916",
"0.7340233",
"0.7339884",
"0.73324823",
"0.73301226",
"0.7325635",
"0.7324227",
"0.7318883",
"0.7310163",
"0.7308022",
"0.7307965",
"0.72915775",
"0.728434",
"0.7281844",
"0.72804487",
"0.727412",
"0.7273177",
"0.7272395",
"0.7253996",
"0.7253277",
"0.7250007",
"0.7250003",
"0.72335243",
"0.7225341",
"0.7213754",
"0.72130394",
"0.7207887",
"0.72028345",
"0.7193078",
"0.7192799",
"0.71815777",
"0.717562",
"0.7174385",
"0.7158705",
"0.71573514",
"0.71535397",
"0.71514136",
"0.71478695",
"0.7147532",
"0.71382344",
"0.71372116",
"0.7133519",
"0.7130278",
"0.71254814",
"0.7105085",
"0.71035403",
"0.70923495",
"0.7089513",
"0.70764613",
"0.70645255",
"0.70618397",
"0.70608336",
"0.7043591",
"0.70257014",
"0.7019708"
] | 0.81228995 | 0 |
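The next record's document implements the relation rho = (scale / 206265000) * D / lambda, i.e. it converts a spaxel scale in milliarcseconds to the relative aperture radius at a given wavelength. A quick worked check, assuming the ELT constants used by the `rho_spaxel_scale` and `airy_and_slicer` negatives later in this record (ELT_DIAM = 39 m, MILIARCSECS_IN_A_RAD = 206265000):

    # Standalone check of the spaxel-scale formula; the constants are assumptions
    # taken from the negatives below, not part of the record's own document.
    ELT_DIAM = 39.0
    MILIARCSECS_IN_A_RAD = 206265000

    def rho_for_spaxel(scale_mas=4.0, wave_um=1.0):
        scale_rad = scale_mas / MILIARCSECS_IN_A_RAD      # mas -> radians
        return scale_rad * ELT_DIAM / (wave_um * 1e-6)    # relative aperture radius

    print(rho_for_spaxel(4.0, 1.0))   # ~0.756 for 4 mas spaxels at 1.0 micron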
Compute the aperture radius necessary to have a certain SPAXEL SCALE [in mas] at a certain WAVELENGTH [in microns] | def spaxel_scale(scale=4, wave=1.0):
scale_rad = scale / MILIARCSECS_IN_A_RAD
rho = scale_rad * ELT_DIAM / (wave * 1e-6)
print(rho) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getSphereRadius(self):\n return 1.5",
"def totalMass(self, trunc=None):\n if trunc is None:\n trunc = self.trunc\n rVir = self.U.rVir(m, z)\n rS, rhoS, c = self.rS_rhoS_c(m, z)\n # truncation radius over scale radius\n xMax = trunc * rVir/rS\n result = 4./3. * np.pi * rS**3 * rhoS\n result = xMax - np.log(1 + xMax)\n return result",
"def totalMass(self, trunc=None):\n if trunc is None:\n trunc = self.trunc\n rVir = self.U.rVir(m, z)\n rS, rhoS, c = self.rS_rhoS_c(m, z)\n # truncation radius over scale radius\n xMax = trunc * rVir/rS\n result = 4./3. * np.pi * rS**3 * rhoS\n result = xMax - np.log(1 + xMax)\n return result",
"def sphere_volume(r):\n return (4/3) * 3.14159 * r**3",
"def sphere_volume(sphere_radius):\n return (4 / 3 * np.pi * sphere_radius**3)",
"def sphere_volume(r):\n\treturn 4/3. * math.pi * r ** 3",
"def beamradius(params,z):\n \n w0=params[0] # beam width at waist [e.g. meters]\n zw=params[1] # waist position [e.g. meters]\n lam = params[2] # wavelength [meters]\n \n zR=np.pi*w0**2/lam # Raleigh length [e.g. meters]\n w=w0*np.sqrt(1+((z-zw)/zR)**2) # beam width at z [e.g. meters]\n R=z*(1+(zR/z)**2) # beam phasefront curvature at z\n\n return w,R,zR # values at pos z [e.g. meters]",
"def spherearea(dia):\n r = dia*1e-4 # convert to cm\n return(4*np.pi*r**2)",
"def check_spaxel_scale(rho_aper, wavelength):\n\n SPAXEL_RAD = rho_aper * wavelength / ELT_DIAM * 1e-6\n SPAXEL_MAS = SPAXEL_RAD * MILIARCSECS_IN_A_RAD\n print('%.2f mas spaxels at %.2f microns' %(SPAXEL_MAS, wavelength))",
"def beam_radius(self, x, Amp, beam_type='vortex', Amp_Flag=True):\r\n \r\n# dx = x[[0],[1]]-x[[0],[0]]\r\n# \r\n# Intensity = (Amp*Amp.conjugate()).real\r\n# N,N = Amp.shape\r\n# \r\n# if beam_type == 'vortex':\r\n# \r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity)\r\n# \r\n# elif beam_type == 'gauss':\r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity,value=np.max(Intensity)/np.e**2)\r\n# \r\n# # cartesian coordinate only;\r\n# radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n# \r\n# return radius\r\n \r\n dx = x[[0],[1]]-x[[0],[0]]\r\n \r\n if Amp_Flag:\r\n Intensity = (Amp*Amp.conjugate()).real\r\n else:\r\n Intensity = Amp\r\n \r\n N,N = Amp.shape\r\n \r\n if beam_type == 'vortex':\r\n \r\n radius = 0\r\n Max = np.max(Intensity)\r\n \r\n NumofDots = 0\r\n \r\n for i in range(N):\r\n for j in range(N):\r\n if Intensity[i,j] > math.floor(Max*1e8)/1e8:\r\n radius += np.sqrt(((i-N/2)*dx)**2+((j-N/2)*dx)**2)\r\n NumofDots += 1\r\n \r\n radius = radius/NumofDots\r\n \r\n elif beam_type == 'gauss':\r\n \r\n m,n = self.getPositon(Intensity, value = np.max(Intensity)/np.e**2)\r\n # appropriate for cartesian coordinate only;\r\n radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n \r\n return radius*2",
"def asphericity(Rnm_eg):\n num = (Rnm_eg[0] - Rnm_eg[2])**2 + (Rnm_eg[1] - Rnm_eg[2])**2 + (Rnm_eg[0] - Rnm_eg[1])**2\n dem = 2*(Rnm_eg[0] + Rnm_eg[1] + Rnm_eg[2])**2\n Asphere = num/dem\n return Asphere",
"def asphericity(Rnm_eg):\n num = (Rnm_eg[0] - Rnm_eg[2])**2 + (Rnm_eg[1] - Rnm_eg[2])**2 + (Rnm_eg[0] - Rnm_eg[1])**2\n dem = 2*(Rnm_eg[0] + Rnm_eg[1] + Rnm_eg[2])**2\n Asphere = num/dem\n return Asphere",
"def fried_parameter_cm(wavelength,arcseconds_of_seeing_500nm=1.,zenith_angle_deg = 0.):\n r0_500nm_cm = (500e-9/(arcseconds_of_seeing_500nm*(np.pi/(180*3600))))*100\n k = r0_500nm_cm/(500e-9)**(6./5)\n r00 = k*wavelength**(6./5.)\n zenith_angle_rad = np.radians(zenith_angle_deg)\n r0z = r00 * np.cos(zenith_angle_rad)**(3/5.) #p60 DFB POI\n return r0z",
"def sphere_volume(radius : number) -> number:\n volume = 4/3*(pi*radius*radius*radius)\n return volume",
"def sphrad(vol):\n return (3.*vol/(4.*np.pi))**(1./3.)",
"def sphere_sre(solution):\n a = 0\n bias = 0.2\n x = solution.get_x()\n x1 = x[:10]\n x2 = x[10:]\n value1 = sum([(i-bias)*(i-bias) for i in x1])\n value2 = 1/len(x) * sum([(i-bias)*(i-bias) for i in x2])\n return value1 + value2",
"def airy_and_slicer(surface, wavelength, scale_mas, PSF_window, N_window):\n\n # Print message to know we are updating the cache\n print('Recalculating Airy Pattern for %.3f microns' % wavelength)\n\n # Plate scales [Px, Py] for each spaxel scale in mm / arcsec,\n # depending on the surface [IS: Image Slicer, DET: Detector]\n plate_scales = {'IS': {4.0: [125, 250], 60.0: [16.67, 16.67]},\n 'DET': {4.0: [3.75, 7.5], 60.0: [0.5, 0.5]}}\n plate_x = plate_scales[surface][scale_mas][0]\n plate_y = plate_scales[surface][scale_mas][1]\n\n # We know how many Microns the pixels of the Geometric PSF span [PSF_window / N_window]\n pix_sampling = PSF_window / N_window # micron at the detector plane\n # Using the plate scale we calculate how many m.a.s each of those pixels have to span\n pix_scale_x = pix_sampling / plate_x # milliarcsec / pixel\n pix_scale_y = pix_sampling / plate_y # milliarcsec / pixel\n\n # Calculate the relative size of the pupil aperture needed to ensure the PSF is\n # sampled with the given pix_scale at the focal plane\n ELT_DIAM = 39\n MILIARCSECS_IN_A_RAD = 206265000\n pix_rad_x = pix_scale_x / MILIARCSECS_IN_A_RAD # radians / pixel\n pix_rad_y = pix_scale_y / MILIARCSECS_IN_A_RAD\n RHO_APER_x = pix_rad_x * ELT_DIAM / (wavelength * 1e-6)\n RHO_APER_y = pix_rad_y * ELT_DIAM / (wavelength * 1e-6)\n RHO_OBSC_x = 0.30 * RHO_APER_x # ELT central obscuration\n RHO_OBSC_y = 0.30 * RHO_APER_y # ELT central obscuration\n\n # Sanity check\n PIX_RAD_x = RHO_APER_x * wavelength / ELT_DIAM * 1e-6\n PIX_RAD_y = RHO_APER_y * wavelength / ELT_DIAM * 1e-6\n PIX_MAS_x = PIX_RAD_x * MILIARCSECS_IN_A_RAD\n PIX_MAS_y = PIX_RAD_y * MILIARCSECS_IN_A_RAD\n\n # Define the ELT pupil mask. Note that we use a central obscuration too\n N = 2048\n x = np.linspace(-1, 1, N)\n xx, yy = np.meshgrid(x, x)\n\n # To get the anamorphic scaling we define the equation for an ellipse\n rho = np.sqrt((xx / RHO_APER_x) ** 2 + (yy / RHO_APER_y) ** 2)\n\n # (1) Propagate to the Image Slicer Focal plane\n elt_mask = (RHO_OBSC_x / RHO_APER_x < rho) & (rho < 1.0)\n pupil = elt_mask * np.exp(1j * elt_mask)\n image_electric = fftshift(fft2(pupil))\n\n if surface == 'IS':\n # print(\"IS\")\n # We are already at the Image Slicer, don't do anything else\n min_pix, max_pix = N // 2 - N_window // 2, N // 2 + N_window // 2\n final_psf = (np.abs(image_electric))**2\n final_psf /= np.max(final_psf)\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n elif surface == 'DET':\n # print(\"DET\")\n # (1.1) Add slicer effect by masking\n # We mask the PSF covering a band of size 1x SPAXEL, depending on the scale\n # If we have 4x4 mas, then we cover a band of 4 mas over the PSF\n x_min, x_max = -N/2 * PIX_MAS_x, N/2 * PIX_MAS_x\n y_min, y_max = -N/2 * PIX_MAS_y, N/2 * PIX_MAS_y\n x_slice = np.linspace(x_min, x_max, N, endpoint=True)\n y_slice = np.linspace(y_min, y_max, N, endpoint=True)\n x_grid, y_grid = np.meshgrid(x_slice, y_slice)\n slicer_mask = np.abs(y_grid) < scale_mas / 2\n\n # ## Show the PSF both in [mas] space where it should be circular and in [pixel] space where it should be anamorphic\n # fig, ax = plt.subplots(1, 1)\n # img1 = ax.imshow((np.abs(image_electric))**2, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # # plt.colorbar(img1, ax=ax)\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'X [mas]')\n # ax.set_ylabel(r'Y [mas]')\n # ax.set_xlim([-10, 10])\n # ax.set_ylim([-10, 10])\n #\n # fig, ax = plt.subplots(1, 1)\n # img1 = 
ax.imshow((np.abs(image_electric))**2, extent=[-N/2, N/2, -N/2, N/2], cmap='bwr')\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'Pixels [ ]')\n # ax.set_ylabel(r'Pixels [ ]')\n # ax.set_xlim([-100, 100])\n # ax.set_ylim([-100, 100])\n\n # plt.show()\n\n # (2) Propagate the masked electric field to Pupil Plane\n pup_grating = ifft2(fftshift(slicer_mask * image_electric))\n # (2.1) Add pupil mask, this time without the central obscuration\n aperture_mask = rho < 1.0\n\n # (3) Propagate back to Focal Plane\n final_focal = fftshift(fft2(aperture_mask * pup_grating))\n final_psf = (np.abs(final_focal))**2\n final_psf /= np.max(final_psf)\n\n # (4) Crop the PSF to fit to the necessary window to ease the convolutions\n min_pix, max_pix = N//2 - N_window//2, N//2 + N_window//2\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n # If we want to show the plots for Documentation\n\n # fig, (ax1, ax2, ax3) = plt.subplots(1, 3)\n # psf_airy = (np.abs(image_electric))**2\n # img1 = ax1.imshow(psf_airy, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax1.axhline(y=scale_mas/2, linestyle='--', color='black')\n # ax1.axhline(y=-scale_mas/2, linestyle='--', color='black')\n # ax1.set_xlabel(r'X [mas]')\n # ax1.set_ylabel(r'Y [mas]')\n # ax1.set_xlim([-15, 15])\n # ax1.set_ylim([-15, 15])\n # ax1.set_title(r'Airy Pattern | Slicer Mask %.1f mas' % scale_mas)\n #\n # img2 = ax2.imshow(aperture_mask * (np.abs(pup_grating)**2), extent=[-1, 1, -1, 1], cmap='bwr')\n # ax2.set_title(r'Pupil Plane | Aperture Mask')\n # ax2.set_xlim([-0.25, 0.25])\n # ax2.set_ylim([-0.25, 0.25])\n #\n # img3 = ax3.imshow(final_psf, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax3.set_xlabel(r'X [mas]')\n # ax3.set_ylabel(r'Y [mas]')\n # ax3.set_xlim([-15, 15])\n # ax3.set_ylim([-15, 15])\n # ax3.set_title(r'Diffraction Effects')\n # plt.show()\n\n return crop_psf",
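The `airy_and_slicer` negative above walks through a four-step Fraunhofer chain: pupil to image-slicer focal plane, band mask, back to the pupil, then on to the detector. Below is a minimal sketch of just those numbered steps with the plate-scale bookkeeping removed; the grid size, aperture fractions, and slicer band half-width are illustrative choices, not the entry's instrument values:

```python
import numpy as np
from numpy.fft import fft2, ifft2, fftshift

# Pupil grid spanning [-1, 1] x [-1, 1], as in the entry
N = 512                              # illustrative grid size
x = np.linspace(-1, 1, N)
xx, yy = np.meshgrid(x, x)
rho = np.sqrt(xx**2 + yy**2)

RHO_APER, RHO_OBSC = 0.25, 0.075     # illustrative aperture / obscuration fractions
pupil = (RHO_OBSC < rho) & (rho < RHO_APER)

# (1) Pupil -> image-slicer focal plane
focal_field = fftshift(fft2(pupil))

# (1.1) Slicer mask: keep only a horizontal band of rows around the axis
half_width = 8                       # illustrative band half-width in pixels
band = np.abs(np.arange(N) - N // 2) < half_width
masked_field = focal_field * band[:, None]

# (2) Back to the pupil plane; re-aperture without the central obscuration
pupil_field = ifft2(fftshift(masked_field))
pupil_field = pupil_field * (rho < RHO_APER)

# (3) Pupil -> detector focal plane; normalised intensity is the PSF
psf = np.abs(fftshift(fft2(pupil_field)))**2
psf /= psf.max()
```

The double `fftshift` around the inverse transform mirrors the entry's `ifft2(fftshift(...))` pattern, which undoes the centering applied on the forward pass.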
"def rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.0):\n\n scale_rad = spaxel_scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wavelength * 1e-6)\n return rho",
"def magnification(w0, lambda0, s, f, M2=1):\n zR2 = z_rayleigh(w0, lambda0, M2)**2\n return f/np.sqrt((s+f)**2+zR2)",
"def _compute_mass(box_size, evo_config):\n\n # ensure format\n standard_volume = evo_config['individuals']['standard_volume']\n if isinstance(box_size, list):\n if len(box_size) == 1: # sphere\n box_size = box_size[0]\n box_size = np.asarray(box_size)\n\n if np.prod(box_size.shape) < 2: # sphere\n return 4 / 3 * np.pi * box_size**3 / standard_volume\n else: # box\n if np.ndim(box_size) == 1:\n return np.prod(box_size * 2) / standard_volume\n else:\n return np.prod(box_size * 2, axis=1) / standard_volume",
"def sphere_area(radius : number) -> number:\n area = 4*pi*radius*radius\n return area",
"def calcul_v_sphere(r):\n volume = 4/3 * math.pi * (r ** 3)\n return volume",
"def compute_projmass(args):\n radius = args.radius/3600.0\n\n k_map = pyfits.open(args.kappa_map)\n k_data = k_map[0].data\n k_data_tmp = k_data\n\n pix_dim = math.fabs(k_map[0].header[\"CDELT1\"])\n pix_unit = k_map[0].header[\"CUNIT1\"]\n shape = k_map[0].data.shape\n\n x_axis = np.linspace(-(shape[0] - 1.0)/2.0*pix_dim , \\\n (shape[0] - 1.0)/2.0*pix_dim, shape[0])\n y_axis = np.linspace(-(shape[1] - 1.0)/2.0*pix_dim , \\\n (shape[1] - 1.0)/2.0*pix_dim, shape[1])\n\n if pix_unit != \"deg\":\n print \"Error, pixel unit not in deg\"\n if (x_axis.max() - x_axis.min())/2.0 < radius:\n print \"Error, the radius is larger than the image limits\"\n\n\n proj_mass = 0.0\n for i_x in range(shape[0]):\n for i_y in range(shape[1]):\n if x_axis[i_x]**2.0 + y_axis[i_y]**2.0 <= radius**2.0:\n #k_data_tmp[i_x][i_y] = 0.0\n proj_mass += k_data_tmp[i_x][i_y]\n\n print \"%e M_sol\" % (proj_mass*1E12)\n\n if args.plot_cont:\n circ = fc.make_circunference(radius*3600, 0, 0)\n plt.plot(circ[0], circ[1], \"k--\", linewidth = 2)\n plt.contour(x_axis*3600.0, y_axis*3600.0, k_data)\n plt.show()\n\n return proj_mass",
"def calculate_soma_surface(data: Data) -> float:\n\n soma = data.morphology.get_soma()\n return 4.0 * math.pi * soma['radius'] * soma['radius']",
"def resolution(self, radius, wave = None):\n dev = Prism.minDeviation(self,wave)\n alpha = dev/2 + self.angle/2\n\n # Form path difference between top and bottom of the beam\n d = 4*radius*math.sin(self.angle/2)/math.cos(alpha)\n dmax = 2.0*self.height*math.tan(self.angle/2) # Length of bottom of prism\n if d > dmax:\n d = dmax\n print(\"Resolution limited by size of prism\")\n\n\n dn = self.n.getDerivative(wave) # dn/d lambda\n return 1000*d*dn # scale to microms",
"def sphvol(r):\n return (4./3.)*np.pi*(r**3.)",
"def nsphere_volume(n, r):\n return math.pi ** (n / 2) * (r ** n) / gamma(n / 2 + 1)",
"def angRMS(angs, weights):\n diff = angs - angMean(angs, weights)\n diff[diff < -np.pi] += 2*np.pi\n diff[diff > np.pi] -= 2*np.pi\n return np.sqrt( angMean(diff**2, weights) ) # weighted std dev",
"def molar_mass_dry_air():\n return 28.9647",
"def sphereVolume(radius):\n volume = (4 / 3) * math.pi * radius ** 3\n return volume",
"def app_mag(abs_mag, phase_angle, slope_g, d_ast_sun, d_ast_earth):\n\n # Compute the apparent / visual magnitude\n mag = red_mag(abs_mag, phase_angle, slope_g) \\\n + 5.0 * np.log10(d_ast_sun * d_ast_earth)\n\n # Return the apparent magnitude\n return mag",
"def sphere_r_intensity(img):\n pixels = []\n for j in range(0, img.shape[0]):\n for i in range(1, 40):\n pixels.append(img[j, img.shape[1] - i])\n\n return np.mean(pixels)",
"def get_radius(size):\n return (size * 10) - 5",
"def rms(a):\n\treturn np.sqrt(np.sum(np.power(a,2))/len(a))",
"def surfaceIntSphere(r: float) -> float:\n return 4.0 * np.pi * r * r",
"def sphereArea(radius):\n area = 4 * math.pi * radius ** 2\n return area",
"def find_radius(mass,delta_m,eta,xi,mue,pp_factor):\n\n #range of radii; reason in detail under step 9 of report\n r_low = 0.01*Rsun # MKS\n r_high = 3*Rsun # MKS\n \n radius = brentq(lum_difference, r_low, r_high, xtol=1.0e-4, args = (mass,delta_m,eta,xi,mue,pp_factor))\n return radius",
"def Mean_square_beam_radius(self,Amp,x,y,Amp_flag=True):\r\n \r\n if Amp_flag:\r\n I = (Amp*np.conjugate(Amp)).real\r\n else:\r\n I = Amp\r\n dx = x[0,1]-x[0,0]\r\n x_c,y_c = self.Light_Spot_Centroid(Amp,x,y,Amp_flag)\r\n Nominator_x = self.double_trapz(I*(x-x_c)**2,dx=dx,dy=dx)\r\n Nominator_y = self.double_trapz(I*(y-y_c)**2,dx=dx,dy=dx)\r\n Denominator = self.double_trapz(I,dx=dx,dy=dx)\r\n Res = Nominator_x/Denominator+Nominator_y/Denominator\r\n \r\n return np.sqrt(Res)",
"def beam_radius(w0, lambda0, z, z0=0, M2=1):\n zz = (z-z0)/z_rayleigh(w0, lambda0, M2)\n return w0*np.sqrt(1+zz**2)",
"def apply_spectral_radius(w,spectral_radius):\n assert len(w.shape)==2 and w.shape[0]==w.shape[1],\\\n \"Error: apply_spectral_radius must receive 'w' as a square matrix.\"\n\n new_w = np.array(w)\n spectral_radius_w = calc_spectral_radius(w)\n if spectral_radius_w > 0.0:\n new_w = (w / spectral_radius_w) * spectral_radius\n else:\n print(\"Warning: Spectral radius of 'w' is zero (because of small size). Therefore, spectral radius does not changed.\")\n\n return new_w",
"def calc_hypersphere_volume(r: float, n: int) -> float:\n return (math.pi ** (n / 2) * r ** n) / gamma((n / 2) + 1)",
"def rscale(mag=10.0):\n if mag > 11.5:\n return 0.5\n elif mag > 11.0:\n return 1.0\n elif mag > 10.5:\n return 1.5\n elif mag > 10.0:\n return 1.5\n elif mag > 9.5:\n return 2.0\n elif mag > 9.0:\n return 2.5\n elif mag > 8.5:\n return 3.0\n else:\n return 3.5",
"def _mass_radius_relation(R, A, n):\n\n M = A * (R ** ((3 - n) / (1 - n)))\n return M",
"def mean_radius(self):\n return (self.semimajor_axis + self.semimedium_axis + self.semiminor_axis) / 3",
"def calculate_wavelength(period, depth, gravity):\r\n return geometry.gmCalculateWavelength(period, depth, gravity)",
"def Radius(self, *args):\n return _Bnd.Bnd_Sphere_Radius(self, *args)",
"def mas2rad(x):\n return x * 4.8481368110953599e-09",
"def cal_mass(self):\n\n if not self.check_def(['E','px','py','pz']):\n sys.exit('Particle error: Quadri impulsion not define (error for mass routine)')\n\n\n \n if self.E**2-self.px**2-self.py**2-self.pz**2>1e-7: #precision problem\n self.mass=math.sqrt(self.E**2-self.px**2-self.py**2-self.pz**2)\n else:\n self.mass=0",
"def Calc_axe_spheroid(r,c):\n return np.sqrt((r**3)/c)",
"def effective_radius(self, n):\n\n er2 = 5.0 * self.sa / n\n er = np.sqrt(er2)\n\n return er",
"def Truncated_radius(self):\n r_trunc = fminbound(self.Mass_diff_005, -10., np.log10(self.scale_radius))\n return 10**float(r_trunc)",
"def sphere(self, x):\r\n # return np.random.rand(1)[0]**0 * sum(x**2) + 1 * np.random.rand(1)[0]\r\n return sum((x+0)**2)",
"def w(lam, gam, p):\n return np.sqrt((1 - lam*np.cos(2*np.pi*p ) )**2 + (gam*lam*np.sin(2*np.pi*p ) )**2 )",
"def get_scale(units, compartmentId, volume, extracellularVolume):\r\n if compartmentId == 'c':\r\n V = volume\r\n else:\r\n V = extracellularVolume\r\n\r\n if units == 'uM':\r\n return 1. / N_AVOGADRO / V * 1e6\r\n elif units == 'mM':\r\n return 1. / N_AVOGADRO / V * 1e3\r\n elif units == 'molecules':\r\n return 1.\r\n else:\r\n raise Exception('Invalid units \"%s\"' % units)",
"def sphere(\n network,\n pore_diameter='pore.diameter'\n):\n return 4/3*_pi*(network[pore_diameter]/2)**3",
"def spectral(w, s=1.0):\n n_in, n_out = w.size()\n n = max(n_out, n_in)\n gain = s / math.sqrt(n)\n return w.normal_(0, 1).mul_(gain)",
"def aperture_phot(self,data,x,y,v):\n r = np.sqrt((x-self.avg_map_fits['Values'][1])**2 + (y-self.avg_map_fits['Values'][3])**2)\n \n inner = (r < 8./60.) & np.isfinite(data) \n outer = (r > 8.5/60.) & (r < 12./60.) & np.isfinite(data)\n\n annu = np.nanmedian(data[outer])\n annu_rms = np.nanstd(data[outer])\n flux = np.sum(data[inner]) - annu*np.sum(inner)\n\n c = 3e8\n kb=1.38e-23\n beam = (1./60.*np.pi/180.)**2\n factor = 2*kb*(v*1e9/c)**2 * beam * 1e26\n return flux*factor, annu_rms*np.sqrt(np.sum(inner))*factor",
"def QuatMag(wxyz):\n return np.sqrt(np.sum(np.square(wxyz)))",
"def mi_to_m(radius):\n return int(float(radius) * 1609.34)",
"def run_ap_phot(data, fwhm, position=None):\n if type(position) == type(None):\n position = np.array(data.shape) // 2\n\n aperture = CircularAperture(position, r=fwhm)\n\n sky_annulus_aperture = CircularAnnulus(\n position, r_in=fwhm * 3, r_out=fwhm * 3 + 15\n )\n sky_annulus_mask = sky_annulus_aperture.to_mask(method=\"center\")\n sky_annulus_data = sky_annulus_mask.multiply(data)\n sky_annulus_data_1d = sky_annulus_data[sky_annulus_mask.data > 0]\n _, median_sigclip, _ = sigma_clipped_stats(sky_annulus_data_1d)\n\n aperture_bg = median_sigclip * aperture.area\n phot = aperture_photometry(data, aperture)\n\n apmag = (phot[\"aperture_sum\"] - aperture_bg)[0]\n\n skyvar = np.square(np.std(sky_annulus_data))\n phpadu = 1\n\n sigsq = skyvar / sky_annulus_aperture.area\n\n error1 = aperture.area * skyvar # Scatter in sky values\n error2 = (apmag > 0) / phpadu # Random photon noise\n error3 = sigsq * aperture.area**2 # Uncertainty in mean sky brightness\n magerr = np.sqrt(error1 + error2 + error3)\n\n return apmag, magerr",
"def dp_radius(self, s, survey='SPIRE_500'):\n shape = np.array(s[survey].shape)\n cosPA, sinPA = np.cos(s['PA_RAD']), np.sin(s['PA_RAD'])\n cosINCL = s['cosINCL']\n w = s[survey + '_WCS']\n xcm, ycm = s['RA_RAD'], s['DEC_RAD']\n dp_coords = np.zeros([shape[0], shape[1], 2])\n # Original coordinate is (y, x)\n # :1 --> x, RA --> the one needed to be divided by cos(incl)\n # :0 --> y, Dec\n dp_coords[:, :, 0], dp_coords[:, :, 1] = \\\n np.meshgrid(np.arange(shape[1]), np.arange(shape[0]))\n # Now, value inside dp_coords is (x, y)\n # :0 --> x, RA --> the one needed to be divided by cos(incl)\n # :1 --> y, Dec\n for i in range(shape[0]):\n dp_coords[i] = Angle(w.wcs_pix2world(dp_coords[i], 1) * u.deg).rad\n dp_coords[:, :, 0] = 0.5 * (dp_coords[:, :, 0] - xcm) * \\\n (np.cos(dp_coords[:, :, 1]) + np.cos(ycm))\n dp_coords[:, :, 1] -= ycm\n # Now, dp_coords is (dx, dy) in the original coordinate\n # cosPA*dy-sinPA*dx is new y\n # cosPA*dx+sinPA*dy is new x\n if survey[:5] == 'GALEX':\n return np.sqrt((cosPA * dp_coords[:, :, 1] +\n sinPA * dp_coords[:, :, 0])**2 +\n ((cosPA * dp_coords[:, :, 0] -\n sinPA * dp_coords[:, :, 1]))**2) * \\\n s['DIST_MPC'] * 1.0E3 # Radius in kpc\n else:\n return np.sqrt((cosPA * dp_coords[:, :, 1] +\n sinPA * dp_coords[:, :, 0])**2 +\n ((cosPA * dp_coords[:, :, 0] -\n sinPA * dp_coords[:, :, 1]) / cosINCL)**2) * \\\n s['DIST_MPC'] * 1.0E3 # Radius in kpc",
"def _calculate_residual_sphere(parameters, x_values, y_values, z_values):\n #extract the parameters\n x_centre, y_centre, z_centre, radius = parameters\n\n #use numpy's sqrt function here, which works by element on arrays\n distance_from_centre = numpy.sqrt((x_values - x_centre)**2 +\n (y_values - y_centre)**2 +\n (z_values - z_centre)**2)\n\n return distance_from_centre - radius",
"def compute_resolution(zoom, size_px):\n # Calibration data:\n dist_in_um = 10\n dist_in_px = np.array([21.13, 19.62, 8.93])\n zooms = np.array([1.5, 3, 4.5])\n image_max_sizes = np.array([330, 610, 410])\n \n return np.mean((dist_in_um/dist_in_px) * (zoom/zooms) * (image_max_sizes/size_px))",
"def scale(z,h=0.7,omegalambda=0.7,omegam=0.3,omegak=0.0):\n return distcalc(z,h,omegalambda,omegam,omegak)['scale']",
"def sphere(geometry,\n psd_name,psd_shape,psd_loc,psd_scale,\n pore_seed='pore.seed',\n psd_offset=0,\n **kwargs):\n import scipy.stats as spst\n prob_fn = getattr(spst,psd_name)\n P = prob_fn(psd_shape,loc=psd_loc,scale=psd_scale)\n value = P.ppf(geometry[pore_seed])+psd_offset\n return value",
"def get_amplitude_scaling_factor(s, n, snr, method='rms'):\n original_sn_rms_ratio = rms(s) / rms(n)\n target_sn_rms_ratio = 10. ** (float(snr) / 20.) # snr = 20 * lg(rms(s) / rms(n))\n signal_scaling_factor = target_sn_rms_ratio / original_sn_rms_ratio\n return signal_scaling_factor",
"def get_amplitude_scaling_factor(s, n, snr, method='rms'):\n original_sn_rms_ratio = rms(s) / rms(n)\n target_sn_rms_ratio = 10. ** (float(snr) / 20.) # snr = 20 * lg(rms(s) / rms(n))\n signal_scaling_factor = target_sn_rms_ratio / original_sn_rms_ratio\n return signal_scaling_factor",
"def boringInterlude (radiusIn):\n\n\n import math\n volIn = (4/3) * math.pi * (radiusIn ** 3)\n vol = volIn/ 1728\n return vol",
"def Mass_in_R(self, r):\n return self.int_over_density(r)",
"def _calculate_magnification(self, times):\n if self._model.n_lenses == 2:\n factor = 10.\n params = self._model.parameters\n t_1 = params.t_0 - factor * params.t_E\n t_2 = params.t_0 + factor * params.t_E\n self._model.set_magnification_methods([t_1, 'VBBL', t_2])\n self._model.set_default_magnification_method(\n 'point_source_point_lens')\n\n magnification = self._model.magnification(times)\n return magnification",
"def calculate_radius_mass(collector):\n accel = []\n alti = []\n for accel_val, alti_val in\\\n collector.get_iter('acceleration', 'altitude'):\n accel.append(accel_val)\n alti.append(alti_val)\n if len(alti) == 0:\n raise NoDataError('No altitude data to calculate radius/mass')\n return radius_mass(alti, accel, 1e3, 1e7)",
"def model_photometry(waves, f_nu, wavelength):\n\n if wavelength not in [70, 100, 160]: \n return f_nu[np.argmin(np.abs(np.log(waves / wavelength)))]\n\n pacs_filter_fname = pacs_filter_fnames[wavelength]\n\n filter_waves, transmission = np.genfromtxt(pacs_filter_fname,\n unpack=True)\n filter_waves *= 1e-4 # [um]\n \n # interpolate to same wavelengths used in SED\n within_filter = (waves > min(filter_waves)) & (waves < max(filter_waves))\n wave_range = waves[within_filter]\n func_interp = interp1d(filter_waves, transmission, kind='cubic')\n \n interp_transmission = func_interp(wave_range)\n \n flux_density = np.sum([T * f for T, f in \\\n zip(interp_transmission, f_nu[within_filter])]) / np.sum(interp_transmission)\n \n return flux_density",
"def computeA(diameter):\n radius = diameter / 2.0\n return np.pi * (radius**2)",
"def radial_transform(self, width):\n\n factor = width / 2. / sp.pi\n return lambda k_rad: sp.sinc(k_rad * factor)",
"def rms(signal, **kwargs):\n return np.sqrt(np.sum(signal**2))",
"def Mass_sch19(Radius, eRadius):\n a = -0.024048024\n b = 1.0552427\n a_err = 0.007592668\n b_err = 0.017044148\n M = a + b * Radius\n eM = np.sqrt((a_err)**2 + (Radius * b_err)**2 + (b * eRadius)**2)\n return(M, eM)",
"def radius_from_bmesh(bm, center):\n radius = max(\n [get_distance(center, to_revolt_coord(v.co)) for v in bm.verts]\n )\n return radius",
"def volume(self) -> float:\n return 4 / 3 * np.pi * self.radius**3",
"def get_mag_for_size(slide, size):\n max_size = slide.dimensions\n max_mag = highest_mag(slide)\n downsample = np.average([max_dim/size_dim for max_dim, size_dim in zip(max_size, size)])\n return max_mag/downsample",
"def surface_area_hemisphere(radius: float) -> float:\r\n if radius < 0:\r\n raise ValueError(\"surface_area_hemisphere() only accepts non-negative values\")\r\n return 3 * pi * radius**2",
"def omega_plasma(number_density, mass):\n return np.sqrt(4 * np.pi * number_density * cgs.e**2 / mass)",
"def calc_duration_rms(self, duration, osc_freq, osc_damping, m0, m1, m2):\n del (m0, m1, m2)\n\n power = 3.\n coef = 1. / 3.\n\n # This equation was rewritten in Boore and Thompson (2012).\n foo = 1. / (osc_freq * duration)\n dur_ratio = (1 + 1. / (2 * np.pi * osc_damping) *\n (foo / (1 + coef * foo ** power)))\n\n return duration * dur_ratio",
"def beam(xb,yb,zb,wx,wy,wavelen):\n\n zRx = np.pi * wx**2 / wavelen\n zRy = np.pi * wy**2 / wavelen \n \n sqrtX = np.sqrt( 1 + np.power(zb/zRx,2) ) \n sqrtY = np.sqrt( 1 + np.power(zb/zRy,2) ) \n intensity = np.exp( -2.*( np.power(xb/(wx*sqrtX ),2) \\\n + np.power(yb/(wy*sqrtY),2) )) / sqrtX / sqrtY\n return intensity",
"def calculate_rms(samples):\n chunk = pow(abs(samples), 2)\n return math.sqrt(chunk.mean())",
"def jam_axi_rms(surf_lum, sigma_lum, qobs_lum, surf_pot, sigma_pot, qobs_pot,\n inc, mbh, distance, xbin, ybin, ml=None, normpsf=1., pixang=0.,\n pixsize=0., plot=True, rms=None, erms=None, sigmapsf=0.,\n goodbins=None, quiet=False, beta=None, step=0., nrad=20,\n nang=10, rbh=0.01, tensor='zz', vmin=None, vmax=None, **kwargs):\n if beta is None:\n beta = np.zeros_like(surf_lum) # Anisotropy parameter beta = 1 - (sig_z/sig_R)**2\n if not (surf_lum.size == sigma_lum.size == qobs_lum.size == beta.size):\n raise ValueError(\"The luminous MGE components do not match\")\n if not (surf_pot.size == sigma_pot.size == qobs_pot.size):\n raise ValueError(\"The total mass MGE components do not match\")\n if xbin.size != ybin.size:\n raise ValueError(\"xbin and ybin do not match\")\n if rms is not None:\n if erms is None:\n erms = np.full_like(rms, np.median(rms)*0.05) # Constant ~5% errors\n if goodbins is None:\n goodbins = np.ones_like(rms, dtype=bool)\n elif goodbins.dtype != bool:\n raise ValueError(\"goodbins must be a boolean vector\")\n if not (xbin.size == rms.size == erms.size == goodbins.size):\n raise ValueError(\"(rms, erms, goodbins) and (xbin, ybin) do not match\")\n\n sigmapsf = np.atleast_1d(sigmapsf)\n normpsf = np.atleast_1d(normpsf)\n if sigmapsf.size != normpsf.size:\n raise ValueError(\"sigmaPSF and normPSF do not match\")\n\n pc = distance*np.pi/0.648 # Constant factor to convert arcsec --> pc\n\n surf_lum_pc = surf_lum\n surf_pot_pc = surf_pot\n sigma_lum_pc = sigma_lum*pc # Convert from arcsec to pc\n sigma_pot_pc = sigma_pot*pc # Convert from arcsec to pc\n xbin_pc = xbin*pc # Convert all distances to pc\n ybin_pc = ybin*pc\n pixSize_pc = pixsize*pc\n sigmaPsf_pc = sigmapsf*pc\n step_pc = step*pc\n\n # Add a Gaussian with small sigma and the same total mass as the BH.\n # The Gaussian provides an excellent representation of the second moments\n # of a point-like mass, to 1% accuracy out to a radius 2*sigmaBH.\n # The error increses to 14% at 1*sigmaBH, independently of the BH mass.\n #\n if mbh > 0:\n sigmaBH_pc = rbh*pc # Adopt for the BH just a very small size\n surfBH_pc = mbh/(2*np.pi*sigmaBH_pc**2)\n surf_pot_pc = np.append(surfBH_pc, surf_pot_pc) # Add Gaussian to potential only!\n sigma_pot_pc = np.append(sigmaBH_pc, sigma_pot_pc)\n qobs_pot = np.append(1., qobs_pot) # Make sure vectors do not have extra dimensions\n\n qobs_lum = qobs_lum.clip(0, 0.999)\n qobs_pot = qobs_pot.clip(0, 0.999)\n\n t = clock()\n rmsModel = _vrms2(xbin_pc, ybin_pc, inc, surf_lum_pc, sigma_lum_pc,\n qobs_lum, surf_pot_pc, sigma_pot_pc, qobs_pot, beta,\n tensor, sigmaPsf_pc, normpsf, pixSize_pc, pixang,\n step_pc, nrad, nang)\n if not quiet:\n print('jam_axi_rms elapsed time sec: %.2f' % (clock() - t))\n\n if tensor in ('xx', 'yy', 'zz'):\n rmsModel = np.sqrt(rmsModel.clip(0)) # Return SQRT and fix possible rounding errors\n if tensor in ('xy', 'xz'):\n rmsModel *= np.sign(xbin*ybin) # Calculation was done in positive quadrant\n\n # Analytic convolution of the MGE model with an MGE circular PSF\n # using Equations (4,5) of Cappellari (2002, MNRAS, 333, 400)\n #\n lum = surf_lum_pc*qobs_lum*sigma_lum**2 # Luminosity/(2np.pi) of each Gaussian\n flux = np.zeros_like(xbin) # Total MGE surface brightness for plotting\n for sigp, norp in zip(sigmapsf, normpsf): # loop over the PSF Gaussians\n sigmaX = np.sqrt(sigma_lum**2 + sigp**2)\n sigmaY = np.sqrt((sigma_lum*qobs_lum)**2 + sigp**2)\n surfConv = lum / (sigmaX*sigmaY) # PSF-convolved in Lsun/pc**2\n for srf, sx, sy in zip(surfConv, sigmaX, sigmaY): 
# loop over the galaxy MGE Gaussians\n flux += norp*srf*np.exp(-0.5*((xbin/sx)**2 + (ybin/sy)**2))\n\n if rms is None:\n\n chi2 = None\n if ml is None:\n ml = 1.\n else:\n rmsModel *= np.sqrt(ml)\n\n else:\n\n if (ml is None) or (ml <= 0):\n\n # y1, dy1 = rms, erms # (y1 are the data, y2 the model)\n # scale = sum(y1*y2/dy1**2)/sum(y2**2/dy1**2) # (equation 51)\n #\n ml = (np.sum(rms[goodbins]*rmsModel[goodbins]/erms[goodbins]**2)\n / np.sum((rmsModel[goodbins]/erms[goodbins])**2))**2\n\n rmsModel *= np.sqrt(ml)\n chi2 = np.sum(((rms[goodbins]-rmsModel[goodbins])/erms[goodbins])**2) / goodbins.sum()\n\n if not quiet:\n print('inc=%.1f beta_z=%.2f M/L=%.3g BH=%.2e chi2/DOF=%.3g' % (inc, beta[0], ml, mbh*ml, chi2))\n mass = 2*np.pi*surf_pot_pc*qobs_pot*sigma_pot_pc**2\n print('Total mass MGE: %.4g' % np.sum(mass*ml))\n\n if plot:\n\n rms1 = rms.copy() # Only symmetrize good bins\n rms1[goodbins] = symmetrize_velfield(xbin[goodbins], ybin[goodbins], rms[goodbins])\n\n if (vmin is None) or (vmax is None):\n vmin, vmax = stats.scoreatpercentile(rms1[goodbins], [0.5, 99.5]) # Could use np.percentile in Numpy 1.10\n\n plt.clf()\n plt.subplot(121)\n plot_velfield(xbin, ybin, rms1, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.title(r\"Input $V_{\\rm rms}$\")\n\n plt.subplot(122)\n plot_velfield(xbin, ybin, rmsModel, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.plot(xbin[~goodbins], ybin[~goodbins], 'ok', mec='white')\n plt.title(r\"Model $V_{\\rm rms}$\")\n plt.tick_params(labelleft='off')\n plt.subplots_adjust(wspace=0.03)\n\n return rmsModel, ml, chi2, flux",
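The closing flux loop of `jam_axi_rms` uses the analytic MGE x Gaussian-PSF convolution of Cappellari (2002, MNRAS, 333, 400), equations (4, 5): each Gaussian component keeps its luminosity while its axes broaden in quadrature with the PSF. A standalone restatement of that loop; the two-component MGE at the bottom is illustrative, not from the entry:

```python
import numpy as np

def mge_psf_convolved_flux(x, y, surf, sigma, qobs, sigma_psf, norm_psf=1.0):
    # Each MGE Gaussian keeps its luminosity while its x/y widths grow in
    # quadrature with the PSF width, the same bookkeeping as the flux loop above.
    lum = surf * qobs * sigma**2                    # luminosity / (2*pi) per component
    sigma_x = np.sqrt(sigma**2 + sigma_psf**2)
    sigma_y = np.sqrt((sigma * qobs)**2 + sigma_psf**2)
    flux = np.zeros_like(np.asarray(x, dtype=float))
    for lum_j, sx, sy in zip(lum, sigma_x, sigma_y):
        flux += norm_psf * lum_j / (sx * sy) * np.exp(-0.5 * ((x / sx)**2 + (y / sy)**2))
    return flux

# Illustrative two-component MGE observed with a 0.6 arcsec Gaussian PSF
surf = np.array([1e4, 1e2])          # central surface brightnesses
sigma = np.array([0.5, 5.0])         # dispersions along the major axis [arcsec]
qobs = np.array([0.6, 0.6])          # observed axial ratios
xb, yb = np.meshgrid(np.linspace(-10, 10, 64), np.linspace(-10, 10, 64))
flux = mge_psf_convolved_flux(xb, yb, surf, sigma, qobs, sigma_psf=0.6)
```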
"def sphere_to_center(p_sphere, resolution=0.5, scale=4, min_value=np.array([0., -50., -4.5])):\n center = p_sphere * (resolution*scale) + min_value\n return center",
"def center_to_sphere(places, size, resolution=0.50, min_value=np.array([0., -50., -4.5]), scale=4, x=(0, 90), y=(-50, 50), z=(-4.5, 5.5)):\n x_logical = np.logical_and((places[:, 0] < x[1]), (places[:, 0] >= x[0]))\n y_logical = np.logical_and((places[:, 1] < y[1]), (places[:, 1] >= y[0]))\n z_logical = np.logical_and((places[:, 2] < z[1]), (places[:, 2] >= z[0]))\n xyz_logical = np.logical_and(x_logical, np.logical_and(y_logical, z_logical))\n center = places.copy()\n center[:, 2] = center[:, 2] + size[:, 0] / 2.\n sphere_center = ((center[xyz_logical] - min_value) / (resolution * scale)).astype(np.int32)\n return sphere_center",
"def test_jam_axi_rms():\n np.random.seed(123)\n xbin, ybin = np.random.uniform(low=[-55, -40], high=[55, 40], size=[1000, 2]).T\n\n inc = 60. # Assumed galaxy inclination\n r = np.sqrt(xbin**2 + (ybin/np.cos(np.radians(inc)))**2) # Radius in the plane of the disk\n a = 40 # Scale length in arcsec\n vr = 2000*np.sqrt(r)/(r+a) # Assumed velocity profile\n vel = vr * np.sin(np.radians(inc))*xbin/r # Projected velocity field\n sig = 8700/(r+a) # Assumed velocity dispersion profile\n rms = np.sqrt(vel**2 + sig**2) # Vrms field in km/s\n\n surf = np.array([39483., 37158., 30646., 17759., 5955.1, 1203.5, 174.36, 21.105, 2.3599, 0.25493])\n sigma = np.array([0.153, 0.515, 1.58, 4.22, 10, 22.4, 48.8, 105, 227, 525])\n qObs = np.full_like(sigma, 0.57)\n\n distance = 16.5 # Assume Virgo distance in Mpc (Mei et al. 2007)\n mbh = 1e8 # Black hole mass in solar masses\n beta = np.full_like(surf, 0.3)\n\n surf_lum = surf # Assume self-consistency\n sigma_lum = sigma\n qobs_lum = qObs\n surf_pot = surf\n sigma_pot = sigma\n qobs_pot = qObs\n\n sigmapsf = 0.6\n pixsize = 0.8\n goodbins = r > 10 # Arbitrarily exclude the center to illustrate how to use goodbins\n\n # The model is similar but not identical to the adopted kinematics!\n rmsModel, ml, chi2, flux = jam_axi_rms(\n surf_lum, sigma_lum, qobs_lum, surf_pot, sigma_pot, qobs_pot,\n inc, mbh, distance, xbin, ybin, plot=True, rms=rms, sigmapsf=sigmapsf,\n beta=beta, pixsize=pixsize, tensor='zz', goodbins=goodbins)\n plt.pause(0.01)",
"def mass(self):\n\t\treturn self.volume*self.density",
"def semidiameter(radius, distance):\n\n return np.arcsin(radius / distance)",
"def calculate_wavelength_metric(wavelength_min, wavelength_max):\n length_max = np.log(550) * 2\n wavelength = np.abs(wavelength_max + wavelength_min) / 2\n log_wl = np.log(wavelength)\n default_met = np.array(log_wl / length_max)\n scaled_met = 1.75 * (default_met - 0.5) + 0.5\n if wavelength == 0:\n return 0\n else:\n return scaled_met.clip(min=10e-11, max=1)",
"def rad2pwm(self, x, sail_name):\n\t\tif sail_name == \"main\":\n\t\t\treturn (2/pi)*(self.pwm_max_main_sail - self.pwm_min_main_sail)*x + self.pwm_min_main_sail\n\t\telif sail_name == \"fore\":\n\t\t\treturn (2/pi)*(self.pwm_max_fore_sail - self.pwm_min_fore_sail)*x + self.pwm_min_fore_sail \n\t\telif sail_name == \"rudder\":\n\t\t\tx = x+pi/3\n\t\t\treturn (3/(2*pi))*(self.pwm_max_rudder - self.pwm_min_rudder)*x + self.pwm_min_rudder",
"def magnitude(p):\n return sqrt((p**2).sum())",
"def get_experimental_spectra(mol):\n\n data = pd.read_csv(mol, sep=',')\n wavelength = data.values[:, 0]\n\n absorption = data.values[:, 1]\n\n func = interp1d(wavelength, absorption, kind='quadratic')\n wavelength_new = 1. / np.linspace(1. / wavelength.max(), 1. / wavelength.min(), 100)\n absorption_new = func(wavelength_new)\n absorption_new *= 100. / absorption_new.max()\n\n return wavelength_new, absorption_new",
"def surface_area_sphere(radius: float) -> float:\r\n if radius < 0:\r\n raise ValueError(\"surface_area_sphere() only accepts non-negative values\")\r\n return 4 * pi * radius**2",
"def earth_radius(units=\"m\"):\n if \"m\" == units:\n return 6371000\n elif \"km\" == units:\n return 6371\n elif \"mi\" == units:\n return 3959",
"def radial_velocity(wv_obj, fx_obj, sig_obj, wv_std, fx_std, sig_std, obj_name, std_name, rv_std, rv_std_err, order,\n xcorr_width, cut, cutstart, cutend):\n\n # The more random iterations, the better... but it takes longer\n n_iter = 1000\n\n # Step 1: Fix the spectra:\n # * Select only the region in which they overlap\n # * Make a new stretched wavelength array (for sub-pixel precision work)\n # * Interpolate the data onto the new wavelength array\n # * Remove large scale slopes so we only compare line and band features\n\n # Find where standard and object overlap ---------------\n wv_min = max([min(wv_std), min(wv_obj)])\n wv_max = min([max(wv_std), max(wv_obj)])\n\n n_pix_std = len(wv_std)\n\n # Creates ln standard wavelength array ---------------------------------\n # AR 2013.0423 The wavelength array only covers the overlap region. Also, I'm folding the rebinning by 10 into this statement.\n acoef_std = (n_pix_std * 10 - 1) / (math.log(wv_max) - math.log(wv_min))\n bcoef_std = (n_pix_std * 10) - (acoef_std * math.log(wv_max))\n\n arr = np.arange(n_pix_std * 10) + 1\n wv_ln_std = np.exp((arr - bcoef_std) / acoef_std)\n\n # AR 2012.1018: Find the conversion between pixels and velocity. This will vary from instrument\n # to instrument and spectral order to spectral order, so we should preferentially calculate this\n # based on the actual input spectrum.\n # AR 2013.0422: Change the calculation to happen AFTER the corrected wavelength scale has been made\n # Find the average pixel/spectrum offset\n # Note: even though it's called micron_per_pix, it will still work if the wavelengths are\n # angstroms instead (it really converts <wavelength unit> to km/s)\n\n # Interpolate data onto same ln wavelength scale -------------------------------\n\n fx_interp_std = np.interp(wv_ln_std, wv_std, fx_std)\n fx_interp_obj = np.interp(wv_ln_std, wv_obj, fx_obj)\n sig_interp_std = np.interp(wv_ln_std, wv_std, sig_std) # AR 2012.1018 Also need to rebin sig\n sig_interp_obj = np.interp(wv_ln_std, wv_obj, sig_obj) # AR 2012.1018 Also need to rebin sig\n\n # Rebin Data ----------------------------\n\n wv_arr_std = np.asarray(wv_ln_std, dtype=float)\n fx_arr_obj = np.asarray(fx_interp_obj, dtype=float)\n fx_arr_std = np.asarray(fx_interp_std, dtype=float)\n sig_arr_obj = np.asarray(sig_interp_obj, dtype=float)\n sig_arr_std = np.asarray(sig_interp_std, dtype=float)\n\n datalen = len(fx_arr_obj)\n\n # Step 2: Measure vsini:\n # Note that as of 2015.0605, this doesn't actually work.\n\n # AR 2014.0922: For vsini:\n # In a loop:\n # Take the standard spectrum\n # broaden it to width X\n # autocorrelate,\n # measure width of gaussian Y (this is supposed to give you a means of translating between width-of-cross-correlation and vsini)\n # Fit function solving Y for X.\n # For each cross correlation of object and standard:\n # Determine vsini\n\n pix_scale = (2.99792458 * 10 ** 5) / acoef_std\n\n # vsinirange = [1,2,5,10,20,30,40,50,60,80,100,100]\n # widthrange = []\n # for v in vsinirange:\n # # Make convolution kernel for v km/s\n # kernel = lsf_rotate(pix_scale,v)\n # # Broaden the standard spectrum\n # fx_obj_wide = np.correlate(fx_arr_obj, kernel, mode='same')\n # # Rectify the spectrum\n # fx_obj_orig = (fx_arr_obj - np.mean(fx_arr_obj))/np.std(fx_arr_obj,ddof=1)\n # fx_obj_wide = (fx_obj_wide - np.mean(fx_obj_wide))/np.std(fx_obj_wide,ddof=1)\n #\n # # Remove a cubic (flatten the spectrum)\n # coeff,pcov = op.curve_fit(cubic,wv_arr_std,fx_obj_wide)\n # fx_obj_wide = fx_obj_wide - (coeff[0] + 
coeff[1]*wv_arr_std + coeff[2]*wv_arr_std**2 + coeff[3]*wv_arr_std**3)\n # coeff,pcov = op.curve_fit(cubic,wv_arr_std,fx_obj_orig)\n # fx_obj_orig = fx_obj_orig - (coeff[0] + coeff[1]*wv_arr_std + coeff[2]*wv_arr_std**2 + coeff[3]*wv_arr_std**3)\n #\n # # Cross-correlate the spectrum with its broadened self\n # ycorr = np.correlate(fx_obj_orig, fx_obj_wide, mode='full')\n # # Now determine where the peak is (should be near 0)\n # length = len(ycorr)\n # xcorr = np.arange(length) - length//2\n # xmid = np.argmax(ycorr)\n # ymax = np.max(ycorr)\n # # Chop out just the portion of the array near the peak\n # xcorr_min=xmid-xcorr_width\n # xcorr_max=xmid+xcorr_width\n # ycorr1=ycorr[xcorr_min:xcorr_max]\t#isolate section of array with gaussian\n # xcorr1=xcorr[xcorr_min:xcorr_max] #isolate the same section of the pixel range\n #\n # # set up initial values for gaussian fitting via chi2\n # sig = 10\n # sky = np.min(ycorr1)/1.2\n # # print ycorr1[-1],ycorr1[0],xcorr1[-1],xcorr1[0]\n # sky2 = (ycorr1[-1]-ycorr1[0])/(xcorr1[-1]-xcorr1[0])\n # lnamp = np.log(ymax/1.2-sky)\t# guess some values\n # mean = xcorr[xmid]\n #\n # amp = np.exp(lnamp)\n # sig2 = sig**2\n # # suggestion from D. Hogg 12/15/12: Add extra linear feature to fit.\n # # suggestion from D. Hogg 12/15/12: operate on ln(amp) so that the amplitude CANNOT be negative.\n # def chi2(p):\t#define gaussian function for fitting\n # sig2=p[2] ** 2\n # m = (np.exp(p[0]) * np.exp(-0.5 * (xcorr1 - p[1]) ** 2 / sig2)) + p[3] + p[4]*xcorr1\n # return (ycorr1 - m)\n #\n # # Fit the gaussian.\n # popt, ier = op.leastsq(chi2, [lnamp, mean, sig, sky, sky2])\n # lnamp, mean, sig, sky, sky2 = popt\n #\n # amp = np.exp(lnamp)\n # # record the width\n # widthrange.append(sig)\n #\n # # Plot all the widths to get a width-vsini curve\n # vsinicoeff,popt = op.curve_fit(quartic,np.asarray(widthrange),np.asarray(vsinirange))\n #\n # relationx = np.arange(50,200,1)\n # relationy = vsinicoeff[0]+vsinicoeff[1]*relationx+vsinicoeff[2]*relationx**2+vsinicoeff[3]*relationx**3+vsinicoeff[4]*relationx**4\n # figv = plt.figure(1)\n # axv = figv.add_subplot(211)\n # axv.scatter(widthrange,vsinirange)\n # axv.plot(relationx,relationy)\n # #ax.text(70,100,\"{0:} {1:} {2:} {3:} {4:}\".format(vsinicoeff))\n\n # 3. Cross-correlate the data, using n_iter trials:\n # * Generate two random gaussian noises scaled to the uncertainty on the fluxes\n # * Apply those gaussian noises to the standard and target stars\n # * Cross-correlate the standard and target stars\n # * Find and then cut out just the part of the cross-correlation curve near the maximum\n # * Set up gaussian\n # * Fit gaussian to that center part\n # * Save fitted parameters (pixel shift aka mean of gaussian, width aka stddev of gaussian)\n # * Repeat n_iter times\n\n # Cross correlation loop --------------------------------\n pix_shift = np.array([]) # initialize array for pixel shift values\n pix_width = np.zeros(n_iter) # initialize array for pixel width values\n l = 0\n\n # using the xrange generator rather than making a full list saves memory\n while len(pix_shift) < n_iter:\n # prepare the randomized data\n # GETTING ARRAYS READY FOR CROSS CORRELATION\n\n\n # Randomize noise:\n # create gaussian distribution of random numbers b/t 1 and -1, multiply err by numbers, add numbers to flux\n # I have drastically simplified the arrays here AR 2013.0319\n # AR 2013.0318: There was a problem, previously: noise was a fixed value, not linked to the known error values\n\n # AR 2013.0321: Speed fix. 
Rather than step through the array and generate one\n # normally-distributed error value scaled to the SNR at that point, I will generate an\n # array of normally-distributed error values scaled to 1, and then multiply by the SNR:\n # One array generation, one array multiplication.\n\n rand_dist = np.random.normal(loc=0.0, scale=1.0, size=datalen)\n rand_dist2 = np.random.normal(loc=0.0, scale=1.0, size=datalen)\n\n fx_temp_obj = np.asarray(fx_arr_obj + rand_dist * sig_arr_obj)\n fx_temp_std = np.asarray(fx_arr_std + rand_dist2 * sig_arr_std)\n mean_obj = np.mean(fx_temp_obj)\n mean_std = np.mean(fx_temp_std)\n stddev_obj = np.std(fx_temp_obj, ddof=1)\n stddev_std = np.std(fx_temp_std, ddof=1)\n\n # Regularize data (subtract mean, divide by std dev) (Should definitely be done AFTER noise was added)\n fx_reg_temp_obj = fx_temp_obj - mean_obj\n fx_reg_temp_obj = fx_reg_temp_obj / stddev_obj\n fx_reg_temp_std = fx_temp_std - mean_std\n fx_reg_temp_std = fx_reg_temp_std / stddev_std\n\n # curve fit - remove a cubic AR 2012.1113\n coeff, pcov = op.curve_fit(cubic, wv_arr_std, fx_reg_temp_obj)\n fx_reg_temp_obj = fx_reg_temp_obj - (\n coeff[0] + coeff[1] * wv_arr_std + coeff[2] * wv_arr_std ** 2 + coeff[3] * wv_arr_std ** 3)\n coeff, pcov = op.curve_fit(cubic, wv_arr_std, fx_reg_temp_std)\n fx_reg_temp_std = fx_reg_temp_std - (\n coeff[0] + coeff[1] * wv_arr_std + coeff[2] * wv_arr_std ** 2 + coeff[3] * wv_arr_std ** 3)\n\n # CROSS CORRELATION\n\n # compute the cross-correlation between the two spectra\n\n ycorr = np.correlate(fx_reg_temp_obj, fx_reg_temp_std, mode='full')\n # time required: 0.045 seconds average\n\n # http://stackoverflow.com/questions/12323959/fast-cross-correlation-method-in-python\n # conv1 = np.zeros(datalen * 2)\n # conv1[datalen/2:datalen/2+datalen] = fx_reg_temp_obj\n # conv2 = fx_reg_temp_std[::-1]\n # ycorr = signal.fftconvolve(conv1,conv2, mode='valid')\n # time required: 0.006 seconds average, but it segfaults by the third try.\n\n ## slight smoothing AR 2013.0315\n # ycorr = scipy.ndimage.filters.gaussian_filter1d(ycorr,11)\n\n # create the x offset axis (same length as ycorr, with 0 in the MIDDLE)\n length = len(ycorr)\n xcorr = np.arange(length) - length // 2\n # AR 2012.1126 Select a tiny piece around the maximum to fit with a gaussian.\n xmid = np.argmax(ycorr)\n ymax = np.max(ycorr)\n # now take just the portion of the array that matters\n xcorr_min = int(xmid - xcorr_width)\n xcorr_max = int(xmid + xcorr_width)\n ycorr1 = ycorr[xcorr_min:xcorr_max] # isolate section of array with gaussian\n xcorr1 = xcorr[xcorr_min:xcorr_max] # isolate the same section of the pixel range\n ycorr2 = ycorr[xcorr_min - 50:xcorr_max + 50]\n xcorr2 = xcorr[xcorr_min - 50:xcorr_max + 50]\n\n # suggestion from D. Hogg 12/15/12: Add extra linear feature to fit.\n # suggestion from D. 
Hogg 12/15/12: operate on ln(amp) so that the amplitude CANNOT be negative.\n def chi2(p): # define gaussian function for fitting\n sig2 = p[2] ** 2\n m = (np.exp(p[0]) * np.exp(-0.5 * (xcorr1 - p[1]) ** 2 / sig2)) + p[3] + p[4] * xcorr1\n return (ycorr1 - m)\n\n # set up initial values for chi2\n sig = 10\n sky = np.min(ycorr1) / 1.2\n # print(ycorr1[-1], ycorr1[0], xcorr1[-1], xcorr1[0])\n sky2 = (ycorr1[-1] - ycorr1[0]) / (xcorr1[-1] - xcorr1[0])\n lnamp = np.log(ymax / 1.2 - sky) # guess some values\n mean = xcorr[xmid]\n\n amp = np.exp(lnamp)\n sig2 = sig ** 2\n\n popt, ier = op.leastsq(chi2, [lnamp, mean, sig, sky, sky2])\n lnamp, mean, sig, sky, sky2 = popt\n\n amp = np.exp(lnamp)\n\n # print_num=len(pix_shift)%100\n print_num = l % 100\n if print_num == 0:\n ## Uncomment the following to make a plot every 500 fits.\n # fig = plt.figure(l)\n # ax = fig.add_subplot(111)\n # my_gauss = (amp * (np.exp(-0.5 * ((xcorr1 - mean) ** 2) / sig**2))) + sky + sky2 * xcorr1\n # ax.plot(xcorr1,my_gauss,'r--')\n # ax.plot(xcorr2,ycorr2,'#000000')\n # ax.plot(xcorr1,ycorr1-my_gauss,'#00CC00')\n ##if abs(mean - xcorr[xmid]) > 5:\n ## print(\"Mean is off\", mean, xcorr[xmid])\n # figname='rv_{0:}_{1:}_{2:}_{3:}.png'.format(std_name,obj_name,order,l)\n # ax.set_xlim(xcorr[xcorr_min-50],xcorr[xcorr_max+50])\n # fig.savefig(figname)\n # fig.clf()\n # plt.close()\n print(\"amp={0: 12.4f} mu={1: 10.4f} sig={2: 9.4f} sky={3: 11.4f} sky2={4: 8.4f} n_entries={5:}\".format(\n amp, mean, sig, sky, sky2, len(pix_shift)))\n\n l += 1\n if (cut == 0) | (mean > float(cutstart)) & (mean < float(cutend)):\n pix_shift = np.append(pix_shift, mean)\n # if ier < 5:\n # I'm calculating the vsini now because I need errors, and the vsini calculation is not linear.\n # pix_width[l] = vsinicoeff[0] + vsinicoeff[1] * sig + vsinicoeff[2] * sig**2 + vsinicoeff[3] * sig**3 + vsinicoeff[4] * sig**4\n\n # End cross correlation loop ---------------------------------\n\n # 4. Find the RV\n # All 5000 rv fits have been calculated and stored in arrays\n # 4a. Cut out outlier RVs. Useful if the cross-correlation produces occasional bad results. Use cutstart and cutend to force the code to only fit a gaussian to a certain region. Don't over-use this to force the result you want, though.\n # 4b. Compute the mean pixel shift and pixel shift uncertainty.\n # 4c. Convert pixel shift into RV\n # 4d. Shift the wavelength array appropriately - all lines should now line up.\n\n ## Uncomment this to print out an example cross-correlation diagram\n # fig = plt.figure(2)\n # ax = fig.add_subplot(111)\n # ax.plot(xcorr,ycorr,'k')\n # figname='rv_{0:}_{1:}_{2:}_xcorr.png'.format(std_name,obj_name,order)\n # fig.savefig(figname)\n # fig.clf()\n # plt.close()\n\n # Turn the list of pixel shifts into a numpy array\n pix_shift = np.asarray(pix_shift)\n\n # 4a. Cut out outliers from the pixel shift\n if cut == 1:\n pix_shift = pix_shift[np.where((pix_shift > float(cutstart)) & (pix_shift < float(cutend)))]\n\n # 4b. Compute the mean pixel shift (rv value) and pixel shift uncertainty (RV uncertainty).\n\n print(l, len(pix_shift), float(len(pix_shift)) / float(n_iter) * 100.0)\n\n mu = np.mean(pix_shift)\n sigma = np.std(pix_shift, ddof=1)\n\n # vsini = np.mean(pix_width)\n # vsini_err = np.std(pix_width,ddof=1)\n\n # axh = figv.add_subplot(212)\n # n, bins, patches=axh.hist(pix_width,bins=30,density=True,facecolor='green',align='mid')\n # figv.savefig('vsiniplot.png')\n # plt.clf()\n # plt.close()\n\n # 4c. 
Transform pixel shift to shift in radial velocity\n\n # AR 2013.0423: The actually appropriate method requires a speed-of-light correction. This works for both angstroms and microns.\n rv_meas = (2.99792458 * 10 ** 5 * mu) / acoef_std\n rv_meas_err = (2.99792458 * 10 ** 5 * sigma) / acoef_std\n\n # 4d. Apply shift to arrays\n wv_rvcorr_obj = wv_arr_std * (1 - rv_meas / (2.99792458 * 10 ** 5))\n\n ## 5. Create plots ---------------------------------\n # The plots are the only reason find_rv.py needs to know the names of either star, or the RV of the standard.\n\n # Plot object and standard so you can clearly see that shift exists --------------------------------\n fig = plt.figure(1)\n\n # AR 2013.0703 Regularize the spectra for display purposes in the final graph\n # I'm using the mean and stddev of the last random-added attempt so it won't be perfect...\n fx_reg_obj = fx_arr_obj - mean_obj\n fx_reg_obj = fx_reg_obj / stddev_obj\n fx_reg_std = fx_arr_std - mean_std\n fx_reg_std = fx_arr_std / stddev_std\n\n # Plots target and standard with shift applied\n ax1 = fig.add_subplot(311)\n ax1.plot(wv_rvcorr_obj, fx_reg_obj, 'red')\n ax1.plot(wv_arr_std, fx_reg_std, 'blue')\n ax1.set_xlabel('wavelength (microns)')\n ax1.set_ylabel('normalized flux')\n target = 'Target: %s' % (obj_name)\n standard = 'Standard: %s' % (std_name)\n ax1.annotate(target, xy=(.7, .9), xycoords='axes fraction', xytext=(.6, .9), textcoords='axes fraction',\n color='red')\n ax1.annotate(standard, xy=(.7, .8), xycoords='axes fraction', xytext=(.6, .8), textcoords='axes fraction',\n color='blue')\n\n sig2 = sig ** 2\n my_gauss = (amp * (np.exp(-0.5 * ((xcorr1 - mu) ** 2) / sig2))) + sky + sky2 * xcorr1\n\n # Plots example of gaussian fit to cross correlation function\n ax2 = fig.add_subplot(312)\n ax2.plot(xcorr1, ycorr1, 'k.')\n ax2.plot(xcorr1, my_gauss, 'r--', linewidth=2)\n ax2.plot(xcorr1, ycorr1 - my_gauss, '#00CC00')\n ax2.set_xlabel('example of fit to cross correlation function')\n ax2.set_xlim(xcorr[xcorr_min - 50], xcorr[xcorr_max + 50])\n # print(pix_shift)\n\n\n ## Plot histogram of pixel shift values --------------------------------\n ax3 = fig.add_subplot(313)\n n, bins, patches = plt.hist(pix_shift, bins=30, density=True, facecolor='green', align='mid')\n # Plot best fit gaussian over histogram\n y = np.exp(-0.5 * ((bins - mu) / sigma) ** 2) / (sigma * np.sqrt(2 * np.pi))\n ax3.plot(bins, y, 'r--', linewidth=2)\n ax3.set_xlabel('radial velocity of target (pixels)')\n ax3.set_ylabel('frequency (normalized)')\n rad = 'RV = %.3f +/- %.3f' % (rv_meas, rv_meas_err)\n corr = 'RV (corr) = %.3f +/- %.3f' % (rv_std + rv_meas, (rv_std_err ** 2 + rv_meas_err ** 2) ** (0.5))\n # vsinistr = 'VsinI = %.3f +/- %.3f' % (vsini,vsini_err)\n ax3.annotate(rad, xy=(.66, .9), xycoords='axes fraction', xytext=(.66, .9), textcoords='axes fraction',\n color='black')\n ax3.annotate(corr, xy=(.6, .8), xycoords='axes fraction', xytext=(.60, .8), textcoords='axes fraction',\n color='black')\n # ax3.annotate(vsinistr,xy=(.6,.6),xycoords='axes fraction',xytext=(.60,.6),textcoords='axes fraction',color='black')\n ax3.annotate('{0:+5.2f} {1: 5.2f}'.format(mu, sigma), xy=(.05, .9), xycoords='axes fraction', xytext=(.05, .9),\n textcoords='axes fraction', color='black')\n ax3.annotate('{0:5.3f} km/s/pix'.format((2.99792458 * 10 ** 5) / acoef_std), xy=(.05, .8), xycoords='axes fraction',\n xytext=(.05, .8), textcoords='axes fraction', color='black')\n fig.subplots_adjust(hspace=.3)\n\n figname = 'rv_%s_%s_%d.png' % (std_name, obj_name, order)\n fig.savefig(figname)\n fig.clf()\n 
plt.close()\n\n # plt.figure(l+1)\n # plt.hist(pix_shift)\n\n # END RADIAL VELOCITY FUNCTION -----------------------------------------\n return rv_meas, rv_meas_err",
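The `radial_velocity` negative above is long, but its core idea fits in a few lines: resample both spectra onto a log-uniform wavelength grid so a Doppler shift becomes a constant pixel offset, cross-correlate many noise-perturbed realisations, and convert the mean and scatter of the peak position to km/s via c / acoef. A toy sketch under those assumptions; it locates the peak with an integer argmax, whereas the entry refines it with a Gaussian fit for sub-pixel precision:

```python
import numpy as np

c_kms = 2.99792458e5
rng = np.random.default_rng(0)

# Log-uniform wavelength grid: a Doppler shift is a constant pixel offset here
n_pix = 4096
wv = np.geomspace(1.50, 1.55, n_pix)                 # [microns], illustrative
acoef = (n_pix - 1) / (np.log(wv[-1]) - np.log(wv[0]))

# Toy spectra: one Gaussian line, target shifted by a known amount
template = np.exp(-0.5 * ((wv - 1.525) / 2e-4)**2)
true_shift_pix = 3.7
target = np.interp(np.arange(n_pix) - true_shift_pix,
                   np.arange(n_pix), template)

shifts = []
for _ in range(200):                                  # Monte-Carlo noise trials
    obj = target + rng.normal(0.0, 0.01, n_pix)
    std = template + rng.normal(0.0, 0.01, n_pix)
    ycorr = np.correlate(obj - obj.mean(), std - std.mean(), mode="full")
    shifts.append(np.argmax(ycorr) - (n_pix - 1))     # integer-pixel peak lag

rv = c_kms * np.mean(shifts) / acoef                  # km/s, as in the entry
rv_err = c_kms * np.std(shifts, ddof=1) / acoef
```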
"def apply_absorption_correction(qz, scale):\n global t\n global mu\n global wavelength\n for i in xrange(len(scale)):\n #a = wavelength * qz[i] / 4 / math.pi\n a = 1.18 * qz[i] / 4 / math.pi\n theta = math.asin(a)\n g = 2 / math.sin(theta)\n Ac = t * g / mu / (1-math.exp(-t*g/mu))\n scale[i] = Ac * scale[i]",
"def calc(self, wavelength):\n if wavelength < self.minWavelength or wavelength > self.maxWavelength:\n return 0\n mm=wavelength%self.interval\n s=self._calcd(wavelength-mm)\n if mm==0:\n return s\n m=mm*1.0/self.interval\n e=self._calcd((wavelength-mm)+self.interval)\n return s+(e-s)*m",
"def quality(\n wavelength: Union[Quantity, ndarray],\n flux: Union[Quantity, ndarray],\n mask: Optional[ndarray] = None,\n **kwargs,\n) -> float:\n flux = flux * u.dimensionless_unscaled # Turn into Quantity if not already\n flux = flux / flux.unit # Remove units from flux (sqrt(N_e) is unitless)\n\n wis = sqrt_sum_wis(wavelength, flux, mask=mask, **kwargs)\n q = wis / np.sqrt(np.nansum(flux))\n return q.value"
] | [
"0.6156244",
"0.6068487",
"0.6068487",
"0.6032485",
"0.6008205",
"0.5966811",
"0.5919071",
"0.5859431",
"0.5852741",
"0.58379126",
"0.5818871",
"0.5818871",
"0.58174205",
"0.580654",
"0.5779134",
"0.57611865",
"0.57207704",
"0.5711433",
"0.57023317",
"0.56883377",
"0.5678015",
"0.56485325",
"0.56400514",
"0.5625404",
"0.55850905",
"0.5578161",
"0.5557369",
"0.55540943",
"0.5530696",
"0.55258757",
"0.5509129",
"0.5490875",
"0.547733",
"0.5472493",
"0.54680526",
"0.54657125",
"0.54528654",
"0.54449856",
"0.5425347",
"0.5416385",
"0.5415863",
"0.5387322",
"0.5386796",
"0.53721464",
"0.53717655",
"0.53708404",
"0.5350864",
"0.53413486",
"0.53367734",
"0.5317961",
"0.5313727",
"0.5313349",
"0.5311173",
"0.53092283",
"0.5302973",
"0.5295286",
"0.52917844",
"0.52732116",
"0.5264197",
"0.5259309",
"0.52541035",
"0.52537423",
"0.5243043",
"0.52377504",
"0.5231903",
"0.52218425",
"0.52218425",
"0.52210724",
"0.52178955",
"0.5216005",
"0.5213285",
"0.51982784",
"0.5188782",
"0.5181391",
"0.51789856",
"0.5176053",
"0.5174724",
"0.5171417",
"0.5168681",
"0.51683974",
"0.5165431",
"0.5161748",
"0.51496613",
"0.51477414",
"0.51431346",
"0.51415676",
"0.5136815",
"0.51323676",
"0.5129122",
"0.51252896",
"0.5123852",
"0.51184297",
"0.5115427",
"0.51131254",
"0.51120377",
"0.5109955",
"0.51040655",
"0.5103667",
"0.50955445",
"0.5091736"
] | 0.5964626 | 6 |
Compute the aperture radius necessary to have a certain SPAXEL SCALE [in mas] at a certain WAVELENGTH [in microns]. That would be the aperture radius in an array ranging over [-1, 1] in physical length. For example, if rho = 0.5, then the necessary aperture is a circle of half the size of the array. We can use the inverse of that to get the "oversize" in physical units in our arrays to match a given scale. | def rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.0):
scale_rad = spaxel_scale / MILIARCSECS_IN_A_RAD
rho = scale_rad * ELT_DIAM / (wavelength * 1e-6)
return rho | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def spaxel_scale(scale=4, wave=1.0):\n\n scale_rad = scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wave * 1e-6)\n print(rho)",
"def check_spaxel_scale(rho_aper, wavelength):\n\n SPAXEL_RAD = rho_aper * wavelength / ELT_DIAM * 1e-6\n SPAXEL_MAS = SPAXEL_RAD * MILIARCSECS_IN_A_RAD\n print('%.2f mas spaxels at %.2f microns' %(SPAXEL_MAS, wavelength))",
"def _compute_mass(box_size, evo_config):\n\n # ensure format\n standard_volume = evo_config['individuals']['standard_volume']\n if isinstance(box_size, list):\n if len(box_size) == 1: # sphere\n box_size = box_size[0]\n box_size = np.asarray(box_size)\n\n if np.prod(box_size.shape) < 2: # sphere\n return 4 / 3 * np.pi * box_size**3 / standard_volume\n else: # box\n if np.ndim(box_size) == 1:\n return np.prod(box_size * 2) / standard_volume\n else:\n return np.prod(box_size * 2, axis=1) / standard_volume",
"def sphere_volume(sphere_radius):\n return (4 / 3 * np.pi * sphere_radius**3)",
"def totalMass(self, trunc=None):\n if trunc is None:\n trunc = self.trunc\n rVir = self.U.rVir(m, z)\n rS, rhoS, c = self.rS_rhoS_c(m, z)\n # truncation radius over scale radius\n xMax = trunc * rVir/rS\n result = 4./3. * np.pi * rS**3 * rhoS\n result = xMax - np.log(1 + xMax)\n return result",
"def totalMass(self, trunc=None):\n if trunc is None:\n trunc = self.trunc\n rVir = self.U.rVir(m, z)\n rS, rhoS, c = self.rS_rhoS_c(m, z)\n # truncation radius over scale radius\n xMax = trunc * rVir/rS\n result = 4./3. * np.pi * rS**3 * rhoS\n result = xMax - np.log(1 + xMax)\n return result",
"def getSphereRadius(self):\n return 1.5",
"def sphere_volume(r):\n return (4/3) * 3.14159 * r**3",
"def sphere_volume(r):\n\treturn 4/3. * math.pi * r ** 3",
"def sphere_volume(radius : number) -> number:\n volume = 4/3*(pi*radius*radius*radius)\n return volume",
"def sphrad(vol):\n return (3.*vol/(4.*np.pi))**(1./3.)",
"def rscale(mag=10.0):\n if mag > 11.5:\n return 0.5\n elif mag > 11.0:\n return 1.0\n elif mag > 10.5:\n return 1.5\n elif mag > 10.0:\n return 1.5\n elif mag > 9.5:\n return 2.0\n elif mag > 9.0:\n return 2.5\n elif mag > 8.5:\n return 3.0\n else:\n return 3.5",
"def sphere_sre(solution):\n a = 0\n bias = 0.2\n x = solution.get_x()\n x1 = x[:10]\n x2 = x[10:]\n value1 = sum([(i-bias)*(i-bias) for i in x1])\n value2 = 1/len(x) * sum([(i-bias)*(i-bias) for i in x2])\n return value1 + value2",
"def get_mag_for_size(slide, size):\n max_size = slide.dimensions\n max_mag = highest_mag(slide)\n downsample = np.average([max_dim/size_dim for max_dim, size_dim in zip(max_size, size)])\n return max_mag/downsample",
"def airy_and_slicer(surface, wavelength, scale_mas, PSF_window, N_window):\n\n # Print message to know we are updating the cache\n print('Recalculating Airy Pattern for %.3f microns' % wavelength)\n\n # Plate scales [Px, Py] for each spaxel scale in mm / arcsec,\n # depending on the surface [IS: Image Slicer, DET: Detector]\n plate_scales = {'IS': {4.0: [125, 250], 60.0: [16.67, 16.67]},\n 'DET': {4.0: [3.75, 7.5], 60.0: [0.5, 0.5]}}\n plate_x = plate_scales[surface][scale_mas][0]\n plate_y = plate_scales[surface][scale_mas][1]\n\n # We know how many Microns the pixels of the Geometric PSF span [PSF_window / N_window]\n pix_sampling = PSF_window / N_window # micron at the detector plane\n # Using the plate scale we calculate how many m.a.s each of those pixels have to span\n pix_scale_x = pix_sampling / plate_x # milliarcsec / pixel\n pix_scale_y = pix_sampling / plate_y # milliarcsec / pixel\n\n # Calculate the relative size of the pupil aperture needed to ensure the PSF is\n # sampled with the given pix_scale at the focal plane\n ELT_DIAM = 39\n MILIARCSECS_IN_A_RAD = 206265000\n pix_rad_x = pix_scale_x / MILIARCSECS_IN_A_RAD # radians / pixel\n pix_rad_y = pix_scale_y / MILIARCSECS_IN_A_RAD\n RHO_APER_x = pix_rad_x * ELT_DIAM / (wavelength * 1e-6)\n RHO_APER_y = pix_rad_y * ELT_DIAM / (wavelength * 1e-6)\n RHO_OBSC_x = 0.30 * RHO_APER_x # ELT central obscuration\n RHO_OBSC_y = 0.30 * RHO_APER_y # ELT central obscuration\n\n # Sanity check\n PIX_RAD_x = RHO_APER_x * wavelength / ELT_DIAM * 1e-6\n PIX_RAD_y = RHO_APER_y * wavelength / ELT_DIAM * 1e-6\n PIX_MAS_x = PIX_RAD_x * MILIARCSECS_IN_A_RAD\n PIX_MAS_y = PIX_RAD_y * MILIARCSECS_IN_A_RAD\n\n # Define the ELT pupil mask. Note that we use a central obscuration too\n N = 2048\n x = np.linspace(-1, 1, N)\n xx, yy = np.meshgrid(x, x)\n\n # To get the anamorphic scaling we define the equation for an ellipse\n rho = np.sqrt((xx / RHO_APER_x) ** 2 + (yy / RHO_APER_y) ** 2)\n\n # (1) Propagate to the Image Slicer Focal plane\n elt_mask = (RHO_OBSC_x / RHO_APER_x < rho) & (rho < 1.0)\n pupil = elt_mask * np.exp(1j * elt_mask)\n image_electric = fftshift(fft2(pupil))\n\n if surface == 'IS':\n # print(\"IS\")\n # We are already at the Image Slicer, don't do anything else\n min_pix, max_pix = N // 2 - N_window // 2, N // 2 + N_window // 2\n final_psf = (np.abs(image_electric))**2\n final_psf /= np.max(final_psf)\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n elif surface == 'DET':\n # print(\"DET\")\n # (1.1) Add slicer effect by masking\n # We mask the PSF covering a band of size 1x SPAXEL, depending on the scale\n # If we have 4x4 mas, then we cover a band of 4 mas over the PSF\n x_min, x_max = -N/2 * PIX_MAS_x, N/2 * PIX_MAS_x\n y_min, y_max = -N/2 * PIX_MAS_y, N/2 * PIX_MAS_y\n x_slice = np.linspace(x_min, x_max, N, endpoint=True)\n y_slice = np.linspace(y_min, y_max, N, endpoint=True)\n x_grid, y_grid = np.meshgrid(x_slice, y_slice)\n slicer_mask = np.abs(y_grid) < scale_mas / 2\n\n # ## Show the PSF both in [mas] space where it should be circular and in [pixel] space where it should be anamorphic\n # fig, ax = plt.subplots(1, 1)\n # img1 = ax.imshow((np.abs(image_electric))**2, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # # plt.colorbar(img1, ax=ax)\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'X [mas]')\n # ax.set_ylabel(r'Y [mas]')\n # ax.set_xlim([-10, 10])\n # ax.set_ylim([-10, 10])\n #\n # fig, ax = plt.subplots(1, 1)\n # img1 = 
ax.imshow((np.abs(image_electric))**2, extent=[-N/2, N/2, -N/2, N/2], cmap='bwr')\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'Pixels [ ]')\n # ax.set_ylabel(r'Pixels [ ]')\n # ax.set_xlim([-100, 100])\n # ax.set_ylim([-100, 100])\n\n # plt.show()\n\n # (2) Propagate the masked electric field to Pupil Plane\n pup_grating = ifft2(fftshift(slicer_mask * image_electric))\n # (2.1) Add pupil mask, this time without the central obscuration\n aperture_mask = rho < 1.0\n\n # (3) Propagate back to Focal Plane\n final_focal = fftshift(fft2(aperture_mask * pup_grating))\n final_psf = (np.abs(final_focal))**2\n final_psf /= np.max(final_psf)\n\n # (4) Crop the PSF to fit to the necessary window to ease the convolutions\n min_pix, max_pix = N//2 - N_window//2, N//2 + N_window//2\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n # If we want to show the plots for Documentation\n\n # fig, (ax1, ax2, ax3) = plt.subplots(1, 3)\n # psf_airy = (np.abs(image_electric))**2\n # img1 = ax1.imshow(psf_airy, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax1.axhline(y=scale_mas/2, linestyle='--', color='black')\n # ax1.axhline(y=-scale_mas/2, linestyle='--', color='black')\n # ax1.set_xlabel(r'X [mas]')\n # ax1.set_ylabel(r'Y [mas]')\n # ax1.set_xlim([-15, 15])\n # ax1.set_ylim([-15, 15])\n # ax1.set_title(r'Airy Pattern | Slicer Mask %.1f mas' % scale_mas)\n #\n # img2 = ax2.imshow(aperture_mask * (np.abs(pup_grating)**2), extent=[-1, 1, -1, 1], cmap='bwr')\n # ax2.set_title(r'Pupil Plane | Aperture Mask')\n # ax2.set_xlim([-0.25, 0.25])\n # ax2.set_ylim([-0.25, 0.25])\n #\n # img3 = ax3.imshow(final_psf, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax3.set_xlabel(r'X [mas]')\n # ax3.set_ylabel(r'Y [mas]')\n # ax3.set_xlim([-15, 15])\n # ax3.set_ylim([-15, 15])\n # ax3.set_title(r'Diffraction Effects')\n # plt.show()\n\n return crop_psf",
"def sphvol(r):\n return (4./3.)*np.pi*(r**3.)",
"def nsphere_volume(n, r):\n return math.pi ** (n / 2) * (r ** n) / gamma(n / 2 + 1)",
"def sphere_r_intensity(img):\n pixels = []\n for j in range(0, img.shape[0]):\n for i in range(1, 40):\n pixels.append(img[j, img.shape[1] - i])\n\n return np.mean(pixels)",
"def rms(a):\n\treturn np.sqrt(np.sum(np.power(a,2))/len(a))",
"def spherearea(dia):\n r = dia*1e-4 # convert to cm\n return(4*np.pi*r**2)",
"def get_scale(units, compartmentId, volume, extracellularVolume):\r\n if compartmentId == 'c':\r\n V = volume\r\n else:\r\n V = extracellularVolume\r\n\r\n if units == 'uM':\r\n return 1. / N_AVOGADRO / V * 1e6\r\n elif units == 'mM':\r\n return 1. / N_AVOGADRO / V * 1e3\r\n elif units == 'molecules':\r\n return 1.\r\n else:\r\n raise Exception('Invalid units \"%s\"' % units)",
"def surfaceIntSphere(r: float) -> float:\n return 4.0 * np.pi * r * r",
"def sphere_area(radius : number) -> number:\n area = 4*pi*radius*radius\n return area",
"def asphericity(Rnm_eg):\n num = (Rnm_eg[0] - Rnm_eg[2])**2 + (Rnm_eg[1] - Rnm_eg[2])**2 + (Rnm_eg[0] - Rnm_eg[1])**2\n dem = 2*(Rnm_eg[0] + Rnm_eg[1] + Rnm_eg[2])**2\n Asphere = num/dem\n return Asphere",
"def asphericity(Rnm_eg):\n num = (Rnm_eg[0] - Rnm_eg[2])**2 + (Rnm_eg[1] - Rnm_eg[2])**2 + (Rnm_eg[0] - Rnm_eg[1])**2\n dem = 2*(Rnm_eg[0] + Rnm_eg[1] + Rnm_eg[2])**2\n Asphere = num/dem\n return Asphere",
"def calc_hypersphere_volume(r: float, n: int) -> float:\n return (math.pi ** (n / 2) * r ** n) / gamma((n / 2) + 1)",
"def apply_spectral_radius(w,spectral_radius):\n assert len(w.shape)==2 and w.shape[0]==w.shape[1],\\\n \"Error: apply_spectral_radius must receive 'w' as a square matrix.\"\n\n new_w = np.array(w)\n spectral_radius_w = calc_spectral_radius(w)\n if spectral_radius_w > 0.0:\n new_w = (w / spectral_radius_w) * spectral_radius\n else:\n print(\"Warning: Spectral radius of 'w' is zero (because of small size). Therefore, spectral radius does not changed.\")\n\n return new_w",
"def beam_radius(self, x, Amp, beam_type='vortex', Amp_Flag=True):\r\n \r\n# dx = x[[0],[1]]-x[[0],[0]]\r\n# \r\n# Intensity = (Amp*Amp.conjugate()).real\r\n# N,N = Amp.shape\r\n# \r\n# if beam_type == 'vortex':\r\n# \r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity)\r\n# \r\n# elif beam_type == 'gauss':\r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity,value=np.max(Intensity)/np.e**2)\r\n# \r\n# # cartesian coordinate only;\r\n# radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n# \r\n# return radius\r\n \r\n dx = x[[0],[1]]-x[[0],[0]]\r\n \r\n if Amp_Flag:\r\n Intensity = (Amp*Amp.conjugate()).real\r\n else:\r\n Intensity = Amp\r\n \r\n N,N = Amp.shape\r\n \r\n if beam_type == 'vortex':\r\n \r\n radius = 0\r\n Max = np.max(Intensity)\r\n \r\n NumofDots = 0\r\n \r\n for i in range(N):\r\n for j in range(N):\r\n if Intensity[i,j] > math.floor(Max*1e8)/1e8:\r\n radius += np.sqrt(((i-N/2)*dx)**2+((j-N/2)*dx)**2)\r\n NumofDots += 1\r\n \r\n radius = radius/NumofDots\r\n \r\n elif beam_type == 'gauss':\r\n \r\n m,n = self.getPositon(Intensity, value = np.max(Intensity)/np.e**2)\r\n # appropriate for cartesian coordinate only;\r\n radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n \r\n return radius*2",
"def calculate_soma_surface(data: Data) -> float:\n\n soma = data.morphology.get_soma()\n return 4.0 * math.pi * soma['radius'] * soma['radius']",
"def resolution(self, radius, wave = None):\n dev = Prism.minDeviation(self,wave)\n alpha = dev/2 + self.angle/2\n\n # Form path difference between top and bottom of the beam\n d = 4*radius*math.sin(self.angle/2)/math.cos(alpha)\n dmax = 2.0*self.height*math.tan(self.angle/2) # Length of bottom of prism\n if d > dmax:\n d = dmax\n print(\"Resolution limited by size of prism\")\n\n\n dn = self.n.getDerivative(wave) # dn/d lambda\n return 1000*d*dn # scale to microms",
"def Calc_axe_spheroid(r,c):\n return np.sqrt((r**3)/c)",
"def get_amplitude_scaling_factor(s, n, snr, method='rms'):\n original_sn_rms_ratio = rms(s) / rms(n)\n target_sn_rms_ratio = 10. ** (float(snr) / 20.) # snr = 20 * lg(rms(s) / rms(n))\n signal_scaling_factor = target_sn_rms_ratio / original_sn_rms_ratio\n return signal_scaling_factor",
"def get_amplitude_scaling_factor(s, n, snr, method='rms'):\n original_sn_rms_ratio = rms(s) / rms(n)\n target_sn_rms_ratio = 10. ** (float(snr) / 20.) # snr = 20 * lg(rms(s) / rms(n))\n signal_scaling_factor = target_sn_rms_ratio / original_sn_rms_ratio\n return signal_scaling_factor",
"def center_to_sphere(places, size, resolution=0.50, min_value=np.array([0., -50., -4.5]), scale=4, x=(0, 90), y=(-50, 50), z=(-4.5, 5.5)):\n x_logical = np.logical_and((places[:, 0] < x[1]), (places[:, 0] >= x[0]))\n y_logical = np.logical_and((places[:, 1] < y[1]), (places[:, 1] >= y[0]))\n z_logical = np.logical_and((places[:, 2] < z[1]), (places[:, 2] >= z[0]))\n xyz_logical = np.logical_and(x_logical, np.logical_and(y_logical, z_logical))\n center = places.copy()\n center[:, 2] = center[:, 2] + size[:, 0] / 2.\n sphere_center = ((center[xyz_logical] - min_value) / (resolution * scale)).astype(np.int32)\n return sphere_center",
"def compute_resolution(zoom, size_px):\n # Calibration data:\n dist_in_um = 10\n dist_in_px = np.array([21.13, 19.62, 8.93])\n zooms = np.array([1.5, 3, 4.5])\n image_max_sizes = np.array([330, 610, 410])\n \n return np.mean((dist_in_um/dist_in_px) * (zoom/zooms) * (image_max_sizes/size_px))",
"def sphereVolume(radius):\n volume = (4 / 3) * math.pi * radius ** 3\n return volume",
"def magnitude_ratio(magnitudes):\n median = np.median(magnitudes)\n\n points_above_median = magnitudes[magnitudes > median].size\n\n return points_above_median / magnitudes.size",
"def get_radius(size):\n return (size * 10) - 5",
"def get_size_for_mag(slide, mag):\n max_size = slide.dimensions\n max_mag = highest_mag(slide)\n downsample = max_mag/mag\n return [np.int(np.round(dim/downsample)) for dim in max_size]",
"def calc_rhoenc(mass,r,rmax):\n idx = (r<rmax)\n return mass[idx].sum()/sphvol(rmax)",
"def scale_mag_1(x):\n return np.array([np.true_divide(ui, mag(x)) for ui in x])",
"def sigma(x: npt.NDArray, y: npt.NDArray, xc: float, yc: float, r: float) -> float:\n dx = x - xc\n dy = y - yc\n s: float = np.sqrt(np.mean((np.sqrt(dx ** 2 + dy ** 2) - r) ** 2))\n return s",
"def scale(z,h=0.7,omegalambda=0.7,omegam=0.3,omegak=0.0):\n return distcalc(z,h,omegalambda,omegam,omegak)['scale']",
"def fRes(self, mag, size, ps):\n x = 2 * np.pi * mag / (size[0] * ps[0])\n y = 2 * np.pi * mag / (size[1] * ps[1])\n return [x, y]",
"def sphereArea(radius):\n area = 4 * math.pi * radius ** 2\n return area",
"def filled_sphere(shape, radius, center=None):\n\tr2 = radius*radius\n\tif center is None:\n\t\t### set to center of array\n\t\tcenter = (shape[0]-1)/2.0,(shape[1]-1)/2.0,(shape[2]-1)/2.0\n\tdef func(i0, i1, i2):\n\t\tii0 = i0 - center[0]\n\t\tii1 = i1 - center[1]\n\t\tii2 = i2 - center[2]\n\t\trr2 = ii0**2 + ii1**2 + ii2**2\n\t\tc = numpy.where(rr2<r2, 0.0, 1.0)\n\t\treturn c\n\treturn numpy.fromfunction(func, shape)",
"def sphere(self, x):\r\n # return np.random.rand(1)[0]**0 * sum(x**2) + 1 * np.random.rand(1)[0]\r\n return sum((x+0)**2)",
"def calcul_v_sphere(r):\n volume = 4/3 * math.pi * (r ** 3)\n return volume",
"def beamradius(params,z):\n \n w0=params[0] # beam width at waist [e.g. meters]\n zw=params[1] # waist position [e.g. meters]\n lam = params[2] # wavelength [meters]\n \n zR=np.pi*w0**2/lam # Raleigh length [e.g. meters]\n w=w0*np.sqrt(1+((z-zw)/zR)**2) # beam width at z [e.g. meters]\n R=z*(1+(zR/z)**2) # beam phasefront curvature at z\n\n return w,R,zR # values at pos z [e.g. meters]",
"def Mass_in_R(self, r):\n return self.int_over_density(r)",
"def _mass_radius_relation(R, A, n):\n\n M = A * (R ** ((3 - n) / (1 - n)))\n return M",
"def _calculate_residual_sphere(parameters, x_values, y_values, z_values):\n #extract the parameters\n x_centre, y_centre, z_centre, radius = parameters\n\n #use numpy's sqrt function here, which works by element on arrays\n distance_from_centre = numpy.sqrt((x_values - x_centre)**2 +\n (y_values - y_centre)**2 +\n (z_values - z_centre)**2)\n\n return distance_from_centre - radius",
"def stdsize(image,r=30):\n image = square(image)\n s,_ = image.shape\n return interpolation.zoom(image,(r+0.5)/float(s))",
"def adapt_length_scale(self):\n Ne = max(1,self.Ne)\n Nc = max(1,self.Nc)\n ratio = Ne/(Ne+Nc)\n self.mu *= 2*ratio",
"def Radius(self, *args):\n return _Bnd.Bnd_Sphere_Radius(self, *args)",
"def spectral(w, s=1.0):\n n_in, n_out = w.size()\n n = max(n_out, n_in)\n gain = s / math.sqrt(n)\n return w.normal_(0, 1).mul_(gain)",
"def omega_plasma(number_density, mass):\n return np.sqrt(4 * np.pi * number_density * cgs.e**2 / mass)",
"def sphere_to_center(p_sphere, resolution=0.5, scale=4, min_value=np.array([0., -50., -4.5])):\n center = p_sphere * (resolution*scale) + min_value\n return center",
"def get_scale_parameter(self):\n\n shape_in_gamma_func = float(1 + (1 / self._shape_parameter))\n gamma_func = special.gamma(shape_in_gamma_func)\n self._scale_parameter = self._mean_fire_recurrence / gamma_func",
"def find_radius(mass,delta_m,eta,xi,mue,pp_factor):\n\n #range of radii; reason in detail under step 9 of report\n r_low = 0.01*Rsun # MKS\n r_high = 3*Rsun # MKS\n \n radius = brentq(lum_difference, r_low, r_high, xtol=1.0e-4, args = (mass,delta_m,eta,xi,mue,pp_factor))\n return radius",
"def circular_aperture(self, r, scale=True):\n if scale:\n radius = r * self.a\n else:\n radius = r\n return CircularAperture(self.coords, float(np.abs(radius)))",
"def scale(structure):\n from numpy.linalg import det\n if \"O\" in [atom.type for atom in structure]: spvol = 8.5**3/4e0\n elif \"Se\" in [atom.type for atom in structure]: spvol = 9.5**3/4e0\n elif \"Te\" in [atom.type for atom in structure]: spvol = 10.5**3/4e0\n else: raise ValueError(\"unknown atom.type: %s\" % (atom.type,))\n\n nfu = float(len(structure)/7)*0.5 # 0.5 because 2 f.u. in spinel unit-cell.\n vol = det(structure.cell)\n return (nfu * spvol / vol)**(1e0/3e0)",
"def sigma_R(field, scale):\n field_filtered = filter_Field(field, tophat_kernel, (scale,))\n return field_filtered.t.std()",
"def r_cor_bor(magnitudes):\n median_mag = np.median(magnitudes)\n\n above_1_5_median = magnitudes[magnitudes > (median_mag + 1.5)]\n\n return above_1_5_median.size / magnitudes.size",
"def s_multiplier(self):\n return 4 * np.pi * (self.bins[:, 1]/2)**2",
"def radius_from_bmesh(bm, center):\n radius = max(\n [get_distance(center, to_revolt_coord(v.co)) for v in bm.verts]\n )\n return radius",
"def rS_rhoS_c(self, m, z):\n Rvir = self.U.rVir(m, z)\n # concentration parameter\n #c = 10./(1.+z) * (m / self.m_nonlin)**(-0.2) # from Takada & Jain 2002\n c = 9./(1.+z) * (m / self.m_nonlin)**(-0.13) # Takada & Jain 2003\n # scale radius\n RS = Rvir / c # in Mpc/h\n # normalize the mass within rVir to be mVir\n rhoS = m / (4.*np.pi*RS**3)\n rhoS /= np.log(1.+c) - c/(1.+c) # (Msun/h) / (Mpc/h)^3\n return RS, rhoS, c",
"def rS_rhoS_c(self, m, z):\n Rvir = self.U.rVir(m, z)\n # concentration parameter\n #c = 10./(1.+z) * (m / self.m_nonlin)**(-0.2) # from Takada & Jain 2002\n c = 9./(1.+z) * (m / self.m_nonlin)**(-0.13) # Takada & Jain 2003\n # scale radius\n RS = Rvir / c # in Mpc/h\n # normalize the mass within rVir to be mVir\n rhoS = m / (4.*np.pi*RS**3)\n rhoS /= np.log(1.+c) - c/(1.+c) # (Msun/h) / (Mpc/h)^3\n return RS, rhoS, c",
"def scale(self):\n return self.scale_factor / CONSTANTS.AU",
"def apply_absorption_correction(qz, scale):\n global t\n global mu\n global wavelength\n for i in xrange(len(scale)):\n #a = wavelength * qz[i] / 4 / math.pi\n a = 1.18 * qz[i] / 4 / math.pi\n theta = math.asin(a)\n g = 2 / math.sin(theta)\n Ac = t * g / mu / (1-math.exp(-t*g/mu))\n scale[i] = Ac * scale[i]",
"def computeA(diameter):\n radius = diameter / 2.0\n return np.pi * (radius**2)",
"def surface_area_sphere(radius: float) -> float:\r\n if radius < 0:\r\n raise ValueError(\"surface_area_sphere() only accepts non-negative values\")\r\n return 4 * pi * radius**2",
"def contrast_curve_core(\n star_data,\n plate_scale,\n fwhm=1,\n radius_size=None,\n center=None,\n):\n\n # make copy of data array\n data = star_data.copy()\n\n# data = np.abs(data) #DO NOT DO THIS!!!! It's making the standard deviation too small later.\n\n ################## establish center ########\n\n x, y = np.indices((data.shape))\n\n if type(center) == type(None):\n center = np.array(\n [(x.max() - x.min()) / 2.0, (y.max() - y.min()) / 2.0]\n )\n\n if type(radius_size) == type(None):\n radius_size = fwhm\n\n ########## set up radial coordinate system ########\n\n radii = np.sqrt((x - center[0]) ** 2 + (y - center[1]) ** 2)\n radii = radii.astype(np.int64)\n\n ones = np.ones_like(data)\n\n number_of_a = int(radii.max() / radius_size)\n\n pie_edges = np.arange(0, 390, 30)\n\n ######## set up aperture array ##########\n center_ap = CircularAperture([center[0], center[1]], radius_size)\n\n all_apers, all_apers_areas, all_masks = (\n [center_ap],\n [center_ap.area],\n [center_ap.to_mask(method=\"exact\")],\n )\n\n all_data, all_weights = [all_masks[0].multiply(data)], [\n all_masks[0].multiply(ones)\n ]\n\n all_stds = [twoD_weighted_std(all_data[0], all_weights[0])]\n\n ######## construct the apertures of the annuli #######\n sigma_clip = SigmaClip(sigma=3.0)\n bkgrms = StdBackgroundRMS(sigma_clip)\n\n medians = np.zeros((number_of_a, len(pie_edges) - 1))\n stds = np.zeros((number_of_a, len(pie_edges) - 1))\n seps = np.zeros(number_of_a)\n for j in range(int(number_of_a)):\n r_in = j * radius_size + fwhm\n r_out = j * radius_size + radius_size + fwhm\n seps[j] = (r_in+r_out)/2.*plate_scale\n\n # terminate if completely outside 10 arcseconds\n if (r_in * plate_scale) > 10:\n break\n\n # create aperture\n aper = CircularAnnulus(\n [center[0], center[1]],\n r_in=r_in,\n r_out=r_out,\n )\n\n # multiply the data by the aperture mask and store it\n all_apers.append(aper)\n all_apers_areas.append(aper.area)\n mask = aper.to_mask(method=\"exact\")\n all_masks.append(mask)\n mask_data = mask.multiply(data)\n\n mask_weight = mask.multiply(ones)\n\n for i, pie_edge_near in enumerate(pie_edges[:-1]):\n pie_edge_far = pie_edges[i + 1]\n mask_data_new = mask_data.copy()\n mask_data_new = check_boundaries(\n mask_data_new, pie_edge_near, pie_edge_far\n )\n medians[j, i] = np.nanmedian(mask_data_new)\n mask_data_masked = mask_data_new[~np.isnan(mask_data_new)]\n\n mean, std = meanclip(mask_data_masked, 3, converge_num=0.2)\n stds[j, i] = std\n\n #Return only the medians and stds for distances within the desired range\n seps = seps[0:j]\n medians = medians[0:j,:]\n stds = stds[0:j,:]\n return seps, medians, stds",
"def radial_velocity(wv_obj, fx_obj, sig_obj, wv_std, fx_std, sig_std, obj_name, std_name, rv_std, rv_std_err, order,\n xcorr_width, cut, cutstart, cutend):\n\n # The more random iterations, the better... but it takes longer\n n_iter = 1000\n\n # Step 1: Fix the spectra:\n # * Select only the region in which they overlap\n # * Make a new stretched wavelength array (for sub-pixel precision work)\n # * Interpolate the data onto the new wavelength array\n # * Remove large scale slopes so we only compare line and band features\n\n # Find where standard and object overlap ---------------\n wv_min = max([min(wv_std), min(wv_obj)])\n wv_max = min([max(wv_std), max(wv_obj)])\n\n n_pix_std = len(wv_std)\n\n # Creates ln standard wavelength array ---------------------------------\n # AR 2013.0423 The wavelength array only covers the overlap region. Also, I'm folding the rebinning by 10 into this statement.\n acoef_std = (n_pix_std * 10 - 1) / (math.log(wv_max) - math.log(wv_min))\n bcoef_std = (n_pix_std * 10) - (acoef_std * math.log(wv_max))\n\n arr = np.arange(n_pix_std * 10) + 1\n wv_ln_std = np.exp((arr - bcoef_std) / acoef_std)\n\n # AR 2012.1018: Find the conversion between pixels and velocity. This will vary from instrument\n # to instrument and spectral order to spectral order, so we should preferentially calculate this\n # based on the actual input spectrum.\n # AR 2013.0422: Change the calculation to happen AFTER the corrected wavelength scale has been made\n # Find the average pixel/spectrum offset\n # Note: even though it's called micron_per_pix, it will still work if the wavelengths are\n # angstroms instead (it really converts <wavelength unit> to km/s)\n\n # Interpolate data onto same ln wavelength scale -------------------------------\n\n fx_interp_std = np.interp(wv_ln_std, wv_std, fx_std)\n fx_interp_obj = np.interp(wv_ln_std, wv_obj, fx_obj)\n sig_interp_std = np.interp(wv_ln_std, wv_std, sig_std) # AR 2012.1018 Also need to rebin sig\n sig_interp_obj = np.interp(wv_ln_std, wv_obj, sig_obj) # AR 2012.1018 Also need to rebin sig\n\n # Rebin Data ----------------------------\n\n wv_arr_std = np.asarray(wv_ln_std, dtype=float)\n fx_arr_obj = np.asarray(fx_interp_obj, dtype=float)\n fx_arr_std = np.asarray(fx_interp_std, dtype=float)\n sig_arr_obj = np.asarray(sig_interp_obj, dtype=float)\n sig_arr_std = np.asarray(sig_interp_std, dtype=float)\n\n datalen = len(fx_arr_obj)\n\n # Step 2: Measure vsini:\n # Note that as of 2015.0605, this doesn't actually work.\n\n # AR 2014.0922: For vsini:\n # In a loop:\n # Take the standard spectrum\n # broaden it to width X\n # autocorrelate,\n # measure width of gaussian Y (this is supposed to give you a means of translating between width-of-cross-correlation and vsini)\n # Fit function solving Y for X.\n # For each cross correlation of object and standard:\n # Determine vsini\n\n pix_scale = (2.99792458 * 10 ** 5) / acoef_std\n\n # vsinirange = [1,2,5,10,20,30,40,50,60,80,100,100]\n # widthrange = []\n # for v in vsinirange:\n # # Make convolution kernel for v km/s\n # kernel = lsf_rotate(pix_scale,v)\n # # Broaden the standard spectrum\n # fx_obj_wide = np.correlate(fx_arr_obj, kernel, mode='same')\n # # Rectify the spectrum\n # fx_obj_orig = (fx_arr_obj - np.mean(fx_arr_obj))/np.std(fx_arr_obj,ddof=1)\n # fx_obj_wide = (fx_obj_wide - np.mean(fx_obj_wide))/np.std(fx_obj_wide,ddof=1)\n #\n # # Remove a cubic (flatten the spectrum)\n # coeff,pcov = op.curve_fit(cubic,wv_arr_std,fx_obj_wide)\n # fx_obj_wide = fx_obj_wide - (coeff[0] + 
coeff[1]*wv_arr_std + coeff[2]*wv_arr_std**2 + coeff[3]*wv_arr_std**3)\n # coeff,pcov = op.curve_fit(cubic,wv_arr_std,fx_obj_orig)\n # fx_obj_orig = fx_obj_orig - (coeff[0] + coeff[1]*wv_arr_std + coeff[2]*wv_arr_std**2 + coeff[3]*wv_arr_std**3)\n #\n # # Cross-correlate the spectrum with its broadened self\n # ycorr = np.correlate(fx_obj_orig, fx_obj_wide, mode='full')\n # # Now determine where the peak is (should be near 0)\n # length = len(ycorr)\n # xcorr = np.arange(length) - length//2\n # xmid = np.argmax(ycorr)\n # ymax = np.max(ycorr)\n # # Chop out just the portion of the array near the peak\n # xcorr_min=xmid-xcorr_width\n # xcorr_max=xmid+xcorr_width\n # ycorr1=ycorr[xcorr_min:xcorr_max]\t#isolate section of array with gaussian\n # xcorr1=xcorr[xcorr_min:xcorr_max] #isolate the same section of the pixel range\n #\n # # set up initial values for gaussian fitting via chi2\n # sig = 10\n # sky = np.min(ycorr1)/1.2\n # # print ycorr1[-1],ycorr1[0],xcorr1[-1],xcorr1[0]\n # sky2 = (ycorr1[-1]-ycorr1[0])/(xcorr1[-1]-xcorr1[0])\n # lnamp = np.log(ymax/1.2-sky)\t# guess some values\n # mean = xcorr[xmid]\n #\n # amp = np.exp(lnamp)\n # sig2 = sig**2\n # # suggestion from D. Hogg 12/15/12: Add extra linear feature to fit.\n # # suggestion from D. Hogg 12/15/12: operate on ln(amp) so that the amplitude CANNOT be negative.\n # def chi2(p):\t#define gaussian function for fitting\n # sig2=p[2] ** 2\n # m = (np.exp(p[0]) * np.exp(-0.5 * (xcorr1 - p[1]) ** 2 / sig2)) + p[3] + p[4]*xcorr1\n # return (ycorr1 - m)\n #\n # # Fit the gaussian.\n # popt, ier = op.leastsq(chi2, [lnamp, mean, sig, sky, sky2])\n # lnamp, mean, sig, sky, sky2 = popt\n #\n # amp = np.exp(lnamp)\n # # record the width\n # widthrange.append(sig)\n #\n # # Plot all the widths to get a width-vsini curve\n # vsinicoeff,popt = op.curve_fit(quartic,np.asarray(widthrange),np.asarray(vsinirange))\n #\n # relationx = np.arange(50,200,1)\n # relationy = vsinicoeff[0]+vsinicoeff[1]*relationx+vsinicoeff[2]*relationx**2+vsinicoeff[3]*relationx**3+vsinicoeff[4]*relationx**4\n # figv = plt.figure(1)\n # axv = figv.add_subplot(211)\n # axv.scatter(widthrange,vsinirange)\n # axv.plot(relationx,relationy)\n # #ax.text(70,100,\"{0:} {1:} {2:} {3:} {4:}\".format(vsinicoeff))\n\n # 3. Cross-correlate the data, using n_iter trials:\n # * Generate two random gaussian noises scaled to the uncertainty on the fluxes\n # * Apply those gaussian noises to the standard and target stars\n # * Cross-correlate the standard and target stars\n # * Find and then cut out just the part of the cross-correlation curve near the maximum\n # * Set up gaussian\n # * Fit gaussian to that center part\n # * Save fitted parameters (pixel shift aka mean of gaussian, width aka stddev of gaussian)\n # * Repeat n_iter times\n\n # Cross correlation loop --------------------------------\n pix_shift = np.array([]) # initialize array for pixel shift values\n pix_width = np.zeros(n_iter) # initialize array for pixel width values\n l = 0\n\n # using the xrange generator rather than making a full list saves memory\n while len(pix_shift) < n_iter:\n # prepare the randomized data\n # GETTING ARRAYS READY FOR CROSS CORRELATION\n\n\n # Randomize noise:\n # create gaussian distribution of random numbers b/t 1 and -1, multiply err by numbers, add numbers to flux\n # I have drastically simplified the arrays here AR 2013.0319\n # AR 2013.0318: There was a problem, previously: noise was a fixed value, not linked to the known error values\n\n # AR 2013.0321: Speed fix. 
Rather than step through the array and generate one\n # normally-distributed error value scaled to the SNR at that point, I will generate an\n # array of normally-distributed error values scaled to 1, and then multiply by the SNR:\n # One array generation, one array multiplication.\n\n rand_dist = np.random.normal(loc=0.0, scale=1.0, size=datalen)\n rand_dist2 = np.random.normal(loc=0.0, scale=1.0, size=datalen)\n\n fx_temp_obj = np.asarray(fx_arr_obj + rand_dist * sig_arr_obj)\n fx_temp_std = np.asarray(fx_arr_std + rand_dist2 * sig_arr_std)\n mean_obj = np.mean(fx_temp_obj)\n mean_std = np.mean(fx_temp_std)\n stddev_obj = np.std(fx_temp_obj, ddof=1)\n stddev_std = np.std(fx_temp_std, ddof=1)\n\n # Regularize data (subtract mean, divide by std dev) (Should definitely be done AFTER noise was added)\n fx_reg_temp_obj = fx_temp_obj - mean_obj\n fx_reg_temp_obj = fx_reg_temp_obj / stddev_obj\n fx_reg_temp_std = fx_temp_std - mean_std\n fx_reg_temp_std = fx_reg_temp_std / stddev_std\n\n # curve fit - remove a cubic AR 2012.1113\n coeff, pcov = op.curve_fit(cubic, wv_arr_std, fx_reg_temp_obj)\n fx_reg_temp_obj = fx_reg_temp_obj - (\n coeff[0] + coeff[1] * wv_arr_std + coeff[2] * wv_arr_std ** 2 + coeff[3] * wv_arr_std ** 3)\n coeff, pcov = op.curve_fit(cubic, wv_arr_std, fx_reg_temp_std)\n fx_reg_temp_std = fx_reg_temp_std - (\n coeff[0] + coeff[1] * wv_arr_std + coeff[2] * wv_arr_std ** 2 + coeff[3] * wv_arr_std ** 3)\n\n # CROSS CORRELATION\n\n # compute the cross-correlation between the two spectra\n\n ycorr = np.correlate(fx_reg_temp_obj, fx_reg_temp_std, mode='full')\n # time required: 0.045 seconds average\n\n # http://stackoverflow.com/questions/12323959/fast-cross-correlation-method-in-python\n # conv1 = np.zeros(datalen * 2)\n # conv1[datalen/2:datalen/2+datalen] = fx_reg_temp_obj\n # conv2 = fx_reg_temp_std[::-1]\n # ycorr = signal.fftconvolve(conv1,conv2, mode='valid')\n # time required: 0.006 seconds average, but it segfaults by the third try.\n\n ## slight smoothing AR 2013.0315\n # ycorr = scipy.ndimage.filters.gaussian_filter1d(ycorr,11)\n\n # create the x offset axis (same length as ycorr, with 0 in the MIDDLE)\n length = len(ycorr)\n xcorr = np.arange(length) - length // 2\n # AR 2012.1126 Select a tiny piece around the maximum to fit with a gaussian.\n xmid = np.argmax(ycorr)\n ymax = np.max(ycorr)\n # now take just the portion of the array that matters\n xcorr_min = int(xmid - xcorr_width)\n xcorr_max = int(xmid + xcorr_width)\n ycorr1 = ycorr[xcorr_min:xcorr_max] # isolate section of array with gaussian\n xcorr1 = xcorr[xcorr_min:xcorr_max] # isolate the same section of the pixel range\n ycorr2 = ycorr[xcorr_min - 50:xcorr_max + 50]\n xcorr2 = xcorr[xcorr_min - 50:xcorr_max + 50]\n\n # suggestion from D. Hogg 12/15/12: Add extra linear feature to fit.\n # suggestion from D. 
Hogg 12/15/12: operate on ln(amp) so that the amplitude CANNOT be negative.\n def chi2(p): # define gaussian function for fitting\n sig2 = p[2] ** 2\n m = (np.exp(p[0]) * np.exp(-0.5 * (xcorr1 - p[1]) ** 2 / sig2)) + p[3] + p[4] * xcorr1\n return (ycorr1 - m)\n\n # set up initial values for chi2\n sig = 10\n sky = np.min(ycorr1) / 1.2\n # print ycorr1[-1],ycorr1[0],xcorr1[-1],xcorr1[0]\n sky2 = (ycorr1[-1] - ycorr1[0]) / (xcorr1[-1] - xcorr1[0])\n lnamp = np.log(ymax / 1.2 - sky) # guess some values\n mean = xcorr[xmid]\n\n amp = np.exp(lnamp)\n sig2 = sig ** 2\n\n popt, ier = op.leastsq(chi2, [lnamp, mean, sig, sky, sky2])\n lnamp, mean, sig, sky, sky2 = popt\n\n amp = np.exp(lnamp)\n\n # print_num=len(pix_shift)%100\n print_num = l % 100\n if print_num == 0:\n ## Uncomment the following to make a plot every 500 fits.\n # fig = plt.figure(l)\n # ax = fig.add_subplot(111)\n # my_gauss = (amp * (np.exp(-0.5 * ((xcorr1 - mean) ** 2) / sig**2))) + sky + sky2 * xcorr1\n # ax.plot(xcorr1,my_gauss,'r--')\n # ax.plot(xcorr2,ycorr2,'#000000')\n # ax.plot(xcorr1,ycorr1-my_gauss,'#00CC00')\n ##if abs(mean - xcorr[xmid]) > 5:\n ## print \"Mean is off\",mean,xcorr[xmid]\n # figname='rv_{0:}_{1:}_{2:}_{3:}.png'.format(std_name,obj_name,order,l)\n # ax.set_xlim(xcorr[xcorr_min-50],xcorr[xcorr_max+50])\n # fig.savefig(figname)\n # fig.clf()\n # plt.close()\n print\n \"amp={0: 12.4f} mu={1: 10.4f} sig={2: 9.4f} sky={3: 11.4f} sky2={4: 8.4f} n_entries={5:}\".format(amp,\n mean,\n sig,\n sky,\n sky2,\n len(\n pix_shift))\n\n l += 1\n if (cut == 0) | (mean > np.float(cutstart)) & (mean < np.float(cutend)):\n pix_shift = np.append(pix_shift, mean)\n # if ier < 5:\n # I'm calculating the vsini now because I need errors, and the vsini calculation is not linear.\n # pix_width[l] = vsinicoeff[0] + vsinicoeff[1] * sig + vsinicoeff[2] * sig**2 + vsinicoeff[3] * sig**3 + vsinicoeff[4] * sig**4\n\n # End cross correlation loop ---------------------------------\n\n # 4. Find the RV\n # All 5000 rv fits have been calculated and stored in arrays\n # 4a. Cut out outlier RVs. Useful if the cross-correlation produces occasional bad results. Use cutstart and cutend to force the code to only fit a gaussian to a certain region. Don't over-use this to force the result you want, though.\n # 4b. Compute the mean pixel shift and pixel shift uncertainty.\n # 4c. Convert pixel shift into RV\n # 4d. Shift the wavelength array appropriately - all lines should now line up.\n\n ## Uncomment this to print out an example cross-correlation diagram\n # fig = plt.figure(2)\n # ax = fig.add_subplot(111)\n # ax.plot(xcorr,ycorr,'k')\n # figname='rv_{0:}_{1:}_{2:}_xcorr.png'.format(std_name,obj_name,order)\n # fig.savefig(figname)\n # fig.clf()\n # plt.close()\n\n # Turn the list of pixel shifts into a numpy array\n pix_shift = np.asarray(pix_shift)\n\n # 4a. Cut out outliers from the pixel shift\n if cut == 1:\n pix_shift = pix_shift[np.where((pix_shift > np.float(cutstart)) & (pix_shift < np.float(cutend)))]\n\n # 4b. Compute the mean pixel shift (rv value) and pixel shift uncertainty (RV uncertainty).\n\n print\n l, len(pix_shift), np.float(len(pix_shift)) / np.float(n_iter) * 100.0\n\n mu = np.mean(pix_shift)\n sigma = np.std(pix_shift, ddof=1)\n\n # vsini = np.mean(pix_width)\n # vsini_err = np.std(pix_width,ddof=1)\n\n # axh = figv.add_subplot(212)\n # n, bins, patches=axh.hist(pix_width,bins=30,normed=1.0,facecolor='green',align='mid')\n # figv.savefig('vsiniplot.png')\n # plt.clf()\n # plt.close()\n\n # 4c. 
Transform pixel shift to shift in radial velocity\n\n # AR 2013.0423: The actually appropriate method requires a speed-of-light correction. This works for both angstroms and microns.\n rv_meas = (2.99792458 * 10 ** 5 * mu) / acoef_std\n rv_meas_err = (2.99792458 * 10 ** 5 * sigma) / acoef_std\n\n # 4d. Apply shift to arrays\n wv_rvcorr_obj = wv_arr_std * (1 - rv_meas / (2.99792458 * 10 ** 5))\n\n ## 5. Create plots ---------------------------------\n # The plots are the only reason find_rv.py needs to know the names of either star, or the RV of the standard.\n\n # Plot object and standard so you can clearly see that shift exists --------------------------------\n fig = plt.figure(1)\n\n # AR 2013.0703 Regularize the spectra for display purposes in the final graph\n # I'm using the mean and stddev of the last random-added attempt so it won't be perfect...\n fx_reg_obj = fx_arr_obj - mean_obj\n fx_reg_obj = fx_reg_obj / stddev_obj\n fx_reg_std = fx_arr_std - mean_std\n fx_reg_std = fx_arr_std / stddev_std\n\n # Plots target and standard with shift applied\n ax1 = fig.add_subplot(311)\n ax1.plot(wv_rvcorr_obj, fx_reg_obj, 'red')\n ax1.plot(wv_arr_std, fx_reg_std, 'blue')\n ax1.set_xlabel('wavelength (microns)')\n ax1.set_ylabel('normalized flux')\n target = 'Target: %s' % (obj_name)\n standard = 'Standard: %s' % (std_name)\n ax1.annotate(target, xy=(.7, .9), xycoords='axes fraction', xytext=(.6, .9), textcoords='axes fraction',\n color='red')\n ax1.annotate(standard, xy=(.7, .8), xycoords='axes fraction', xytext=(.6, .8), textcoords='axes fraction',\n color='blue')\n\n sig2 = sig ** 2\n my_gauss = (amp * (np.exp(-0.5 * ((xcorr1 - mu) ** 2) / sig2))) + sky + sky2 * xcorr1\n\n # Plots example of gaussian fit to cross correlation function\n ax2 = fig.add_subplot(312)\n ax2.plot(xcorr1, ycorr1, 'k.')\n ax2.plot(xcorr1, my_gauss, 'r--', linewidth=2)\n ax2.plot(xcorr1, ycorr1 - my_gauss, '#00CC00')\n ax2.set_xlabel('example of fit to cross correlation function')\n ax2.set_xlim(xcorr[xcorr_min - 50], xcorr[xcorr_max + 50])\n # print pix_shift\n\n\n ## Plot histogram of pixel shift values --------------------------------\n ax3 = fig.add_subplot(313)\n n, bins, patches = plt.hist(pix_shift, bins=30, normed=1.0, facecolor='green', align='mid')\n # Plot best fit gaussian over histogram\n y = mlab.normpdf(bins, mu, sigma)\n ax3.plot(bins, y, 'r--', linewidth=2)\n ax3.set_xlabel('radial velocity of target (pixels)')\n ax3.set_ylabel('frequency (normalized)')\n rad = 'RV = %.3f +/- %.3f' % (rv_meas, rv_meas_err)\n corr = 'RV (corr) = %.3f +/- %.3f' % (rv_std + rv_meas, (rv_std_err ** 2 + rv_meas_err ** 2) ** (0.5))\n # vsinistr = 'VsinI = %.3f +/- %.3f' % (vsini,vsini_err)\n ax3.annotate(rad, xy=(.66, .9), xycoords='axes fraction', xytext=(.66, .9), textcoords='axes fraction',\n color='black')\n ax3.annotate(corr, xy=(.6, .8), xycoords='axes fraction', xytext=(.60, .8), textcoords='axes fraction',\n color='black')\n # ax3.annotate(vsinistr,xy=(.6,.6),xycoords='axes fraction',xytext=(.60,.6),textcoords='axes fraction',color='black')\n ax3.annotate('{0:+5.2f} {1: 5.2f}'.format(mu, sigma), xy=(.05, .9), xycoords='axes fraction', xytext=(.05, .9),\n textcoords='axes fraction', color='black')\n ax3.annotate('{0:5.3f} km/s/pix'.format((2.99792458 * 10 ** 5) / acoef_std), xy=(.05, .8), xycoords='axes fraction',\n xytext=(.05, .8), textcoords='axes fraction', color='black')\n fig.subplots_adjust(hspace=.3)\n\n figname = 'rv_%s_%s_%d.png' % (std_name, obj_name, order)\n fig.savefig(figname)\n fig.clf()\n 
plt.close()\n\n # plt.figure(l+1)\n # plt.hist(pix_shift)\n\n # END RADIAL VELOCITY FUNCTION -----------------------------------------\n return rv_meas, rv_meas_err",
"def effective_radius(self, n):\n\n er2 = 5.0 * self.sa / n\n er = np.sqrt(er2)\n\n return er",
"def calculate(self, atoms):\n pmat = self.get_polarizability(self.omega, Eext=np.array([1.0, 1.0, 1.0]))\n\n # Specific for raman calls, it expects just the tensor for a single\n # frequency and need only the real part\n\n # For static raman, imaginary part is zero??\n # Answer from Michael Walter: Yes, in the case of finite systems you may\n # choose the wavefunctions to be real valued. Then also the density\n # response function and hence the polarizability are real.\n\n # Convert from atomic units to e**2 Ang**2/eV\n return pmat[:, :, 0].real * (un.Bohr**2) / un.Ha",
"def jam_axi_rms(surf_lum, sigma_lum, qobs_lum, surf_pot, sigma_pot, qobs_pot,\n inc, mbh, distance, xbin, ybin, ml=None, normpsf=1., pixang=0.,\n pixsize=0., plot=True, rms=None, erms=None, sigmapsf=0.,\n goodbins=None, quiet=False, beta=None, step=0., nrad=20,\n nang=10, rbh=0.01, tensor='zz', vmin=None, vmax=None, **kwargs):\n if beta is None:\n beta = np.zeros_like(surf_lum) # Anisotropy parameter beta = 1 - (sig_z/sig_R)**2\n if not (surf_lum.size == sigma_lum.size == qobs_lum.size == beta.size):\n raise ValueError(\"The luminous MGE components do not match\")\n if not (surf_pot.size == sigma_pot.size == qobs_pot.size):\n raise ValueError(\"The total mass MGE components do not match\")\n if xbin.size != ybin.size:\n raise ValueError(\"xbin and ybin do not match\")\n if rms is not None:\n if erms is None:\n erms = np.full_like(rms, np.median(rms)*0.05) # Constant ~5% errors\n if goodbins is None:\n goodbins = np.ones_like(rms, dtype=bool)\n elif goodbins.dtype != bool:\n raise ValueError(\"goodbins must be a boolean vector\")\n if not (xbin.size == rms.size == erms.size == goodbins.size):\n raise ValueError(\"(rms, erms, goodbins) and (xbin, ybin) do not match\")\n\n sigmapsf = np.atleast_1d(sigmapsf)\n normpsf = np.atleast_1d(normpsf)\n if sigmapsf.size != normpsf.size:\n raise ValueError(\"sigmaPSF and normPSF do not match\")\n\n pc = distance*np.pi/0.648 # Constant factor to convert arcsec --> pc\n\n surf_lum_pc = surf_lum\n surf_pot_pc = surf_pot\n sigma_lum_pc = sigma_lum*pc # Convert from arcsec to pc\n sigma_pot_pc = sigma_pot*pc # Convert from arcsec to pc\n xbin_pc = xbin*pc # Convert all distances to pc\n ybin_pc = ybin*pc\n pixSize_pc = pixsize*pc\n sigmaPsf_pc = sigmapsf*pc\n step_pc = step*pc\n\n # Add a Gaussian with small sigma and the same total mass as the BH.\n # The Gaussian provides an excellent representation of the second moments\n # of a point-like mass, to 1% accuracy out to a radius 2*sigmaBH.\n # The error increses to 14% at 1*sigmaBH, independently of the BH mass.\n #\n if mbh > 0:\n sigmaBH_pc = rbh*pc # Adopt for the BH just a very small size\n surfBH_pc = mbh/(2*np.pi*sigmaBH_pc**2)\n surf_pot_pc = np.append(surfBH_pc, surf_pot_pc) # Add Gaussian to potential only!\n sigma_pot_pc = np.append(sigmaBH_pc, sigma_pot_pc)\n qobs_pot = np.append(1., qobs_pot) # Make sure vectors do not have extra dimensions\n\n qobs_lum = qobs_lum.clip(0, 0.999)\n qobs_pot = qobs_pot.clip(0, 0.999)\n\n t = clock()\n rmsModel = _vrms2(xbin_pc, ybin_pc, inc, surf_lum_pc, sigma_lum_pc,\n qobs_lum, surf_pot_pc, sigma_pot_pc, qobs_pot, beta,\n tensor, sigmaPsf_pc, normpsf, pixSize_pc, pixang,\n step_pc, nrad, nang)\n if not quiet:\n print('jam_axi_rms elapsed time sec: %.2f' % (clock() - t))\n\n if tensor in ('xx', 'yy', 'zz'):\n rmsModel = np.sqrt(rmsModel.clip(0)) # Return SQRT and fix possible rounding errors\n if tensor in ('xy', 'xz'):\n rmsModel *= np.sign(xbin*ybin) # Calculation was done in positive quadrant\n\n # Analytic convolution of the MGE model with an MGE circular PSF\n # using Equations (4,5) of Cappellari (2002, MNRAS, 333, 400)\n #\n lum = surf_lum_pc*qobs_lum*sigma_lum**2 # Luminosity/(2np.pi) of each Gaussian\n flux = np.zeros_like(xbin) # Total MGE surface brightness for plotting\n for sigp, norp in zip(sigmapsf, normpsf): # loop over the PSF Gaussians\n sigmaX = np.sqrt(sigma_lum**2 + sigp**2)\n sigmaY = np.sqrt((sigma_lum*qobs_lum)**2 + sigp**2)\n surfConv = lum / (sigmaX*sigmaY) # PSF-convolved in Lsun/pc**2\n for srf, sx, sy in zip(surfConv, sigmaX, sigmaY): 
# loop over the galaxy MGE Gaussians\n flux += norp*srf*np.exp(-0.5*((xbin/sx)**2 + (ybin/sy)**2))\n\n if rms is None:\n\n chi2 = None\n if ml is None:\n ml = 1.\n else:\n rmsModel *= np.sqrt(ml)\n\n else:\n\n if (ml is None) or (ml <= 0):\n\n # y1, dy1 = rms, erms # (y1 are the data, y2 the model)\n # scale = sum(y1*y2/dy1**2)/sum(y2**2/dy1**2) # (equation 51)\n #\n ml = (np.sum(rms[goodbins]*rmsModel[goodbins]/erms[goodbins]**2)\n / np.sum((rmsModel[goodbins]/erms[goodbins])**2))**2\n\n rmsModel *= np.sqrt(ml)\n chi2 = np.sum(((rms[goodbins]-rmsModel[goodbins])/erms[goodbins])**2) / goodbins.sum()\n\n if not quiet:\n print('inc=%.1f beta_z=%.2f M/L=%.3g BH=%.2e chi2/DOF=%.3g' % (inc, beta[0], ml, mbh*ml, chi2))\n mass = 2*np.pi*surf_pot_pc*qobs_pot*sigma_pot_pc**2\n print('Total mass MGE: %.4g' % np.sum(mass*ml))\n\n if plot:\n\n rms1 = rms.copy() # Only symmetrize good bins\n rms1[goodbins] = symmetrize_velfield(xbin[goodbins], ybin[goodbins], rms[goodbins])\n\n if (vmin is None) or (vmax is None):\n vmin, vmax = stats.scoreatpercentile(rms1[goodbins], [0.5, 99.5]) # Could use np.percentile in Numpy 1.10\n\n plt.clf()\n plt.subplot(121)\n plot_velfield(xbin, ybin, rms1, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.title(r\"Input $V_{\\rm rms}$\")\n\n plt.subplot(122)\n plot_velfield(xbin, ybin, rmsModel, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.plot(xbin[~goodbins], ybin[~goodbins], 'ok', mec='white')\n plt.title(r\"Model $V_{\\rm rms}$\")\n plt.tick_params(labelleft='off')\n plt.subplots_adjust(wspace=0.03)\n\n return rmsModel, ml, chi2, flux",
"def two_sphere_system(\n omega: float,\n rot_axis: np.ndarray,\n size: int = 200,\n s1_center_rel: np.ndarray = np.array([0.2, 0.2, 0.2]),\n s1_radius_rel: float = 0.05,\n s2_center_rel: np.ndarray = np.array([-0.2, -0.2, -0.2]),\n s2_radius_rel: float = 0.06,\n) -> np.ndarray:\n # get the rotation object\n rot_axis = rot_axis / np.linalg.norm(rot_axis)\n rotation = R.from_rotvec(-omega * rot_axis)\n # calculate the rotated sphere centers\n # sphere 1\n s1_rel = rotation.apply(s1_center_rel)\n # sphere 2\n s2_rel = rotation.apply(s2_center_rel)\n # get the index grid\n # NOTE: extend the range to make sure the sphere is not rotated out of the volume\n # grid_x, grid_y, grid_z = np.mgrid[0:size, 0:size, 0:size]\n # remapping to compensate for the strange coordinate system in tomopy projector\n grid_y, grid_z, grid_x = np.mgrid[0:size, 0:size, 0:size]\n # rescale to [-0.5, 0.5]\n grid_x = grid_x / (size - 1) - 0.5\n grid_y = -(grid_y / (size - 1) - 0.5)\n grid_z = grid_z / (size - 1) - 0.5\n # init volume\n vol = np.zeros_like(grid_x)\n # mark the voxels of sphere 1 to be 1\n s1_dist_squared = (grid_x - s1_rel[0]) ** 2 + (grid_y - s1_rel[1]) ** 2 + (grid_z - s1_rel[2]) ** 2\n r1_squared = s1_radius_rel**2\n vol[s1_dist_squared < r1_squared] = 1.0\n # mark the voxels of sphere 2 to be 2\n s2_dist_squared = (grid_x - s2_rel[0]) ** 2 + (grid_y - s2_rel[1]) ** 2 + (grid_z - s2_rel[2]) ** 2\n r2_squared = s2_radius_rel**2\n vol[s2_dist_squared < r2_squared] = 1.0\n return vol",
"def Truncated_radius(self):\n r_trunc = fminbound(self.Mass_diff_005, -10., np.log10(self.scale_radius))\n return 10**float(r_trunc)",
"def sphere(geometry,\n psd_name,psd_shape,psd_loc,psd_scale,\n pore_seed='pore.seed',\n psd_offset=0,\n **kwargs):\n import scipy.stats as spst\n prob_fn = getattr(spst,psd_name)\n P = prob_fn(psd_shape,loc=psd_loc,scale=psd_scale)\n value = P.ppf(geometry[pore_seed])+psd_offset\n return value",
"def magnification(w0, lambda0, s, f, M2=1):\n zR2 = z_rayleigh(w0, lambda0, M2)**2\n return f/np.sqrt((s+f)**2+zR2)",
"def onsphere(size=None):\n xy = oncircle(size)\n z = 2.*random(xy.shape[:-1] + (1,)) - 1.\n xy *= sqrt(1. - z*z)\n return concatenate((xy, z), axis=-1)",
"def sphere(\n network,\n pore_diameter='pore.diameter'\n):\n return 4/3*_pi*(network[pore_diameter]/2)**3",
"def calculate_rms(samples):\n chunk = pow(abs(samples), 2)\n return math.sqrt(chunk.mean())",
"def rms(sig):\n\n return np.sqrt(np.sum(np.array(sig)**2)/len(sig))",
"def dp_radius(self, s, survey='SPIRE_500'):\n shape = np.array(s[survey].shape)\n cosPA, sinPA = np.cos(s['PA_RAD']), np.sin(s['PA_RAD'])\n cosINCL = s['cosINCL']\n w = s[survey + '_WCS']\n xcm, ycm = s['RA_RAD'], s['DEC_RAD']\n dp_coords = np.zeros([shape[0], shape[1], 2])\n # Original coordinate is (y, x)\n # :1 --> x, RA --> the one needed to be divided by cos(incl)\n # :0 --> y, Dec\n dp_coords[:, :, 0], dp_coords[:, :, 1] = \\\n np.meshgrid(np.arange(shape[1]), np.arange(shape[0]))\n # Now, value inside dp_coords is (x, y)\n # :0 --> x, RA --> the one needed to be divided by cos(incl)\n # :1 --> y, Dec\n for i in range(shape[0]):\n dp_coords[i] = Angle(w.wcs_pix2world(dp_coords[i], 1) * u.deg).rad\n dp_coords[:, :, 0] = 0.5 * (dp_coords[:, :, 0] - xcm) * \\\n (np.cos(dp_coords[:, :, 1]) + np.cos(ycm))\n dp_coords[:, :, 1] -= ycm\n # Now, dp_coords is (dx, dy) in the original coordinate\n # cosPA*dy-sinPA*dx is new y\n # cosPA*dx+sinPA*dy is new x\n if survey[:5] == 'GALEX':\n return np.sqrt((cosPA * dp_coords[:, :, 1] +\n sinPA * dp_coords[:, :, 0])**2 +\n ((cosPA * dp_coords[:, :, 0] -\n sinPA * dp_coords[:, :, 1]))**2) * \\\n s['DIST_MPC'] * 1.0E3 # Radius in kpc\n else:\n return np.sqrt((cosPA * dp_coords[:, :, 1] +\n sinPA * dp_coords[:, :, 0])**2 +\n ((cosPA * dp_coords[:, :, 0] -\n sinPA * dp_coords[:, :, 1]) / cosINCL)**2) * \\\n s['DIST_MPC'] * 1.0E3 # Radius in kpc",
"def sample_radii(size=1):\n interp_func = InterpolatedUnivariateSpline(m_grid, np.log(r_grid), k=1)\n return np.exp(interp_func(np.random.uniform(0, 1, size=size))) * u.kpc",
"def fried_parameter_cm(wavelength,arcseconds_of_seeing_500nm=1.,zenith_angle_deg = 0.):\n r0_500nm_cm = (500e-9/(arcseconds_of_seeing_500nm*(np.pi/(180*3600))))*100\n k = r0_500nm_cm/(500e-9)**(6./5)\n r00 = k*wavelength**(6./5.)\n zenith_angle_rad = np.radians(zenith_angle_deg)\n r0z = r00 * np.cos(zenith_angle_rad)**(3/5.) #p60 DFB POI\n return r0z",
"def quality(\n wavelength: Union[Quantity, ndarray],\n flux: Union[Quantity, ndarray],\n mask: Optional[ndarray] = None,\n **kwargs,\n) -> float:\n flux = flux * u.dimensionless_unscaled # Turn into Quantity if not already\n flux = flux / flux.unit # Remove units from flux (sqrt(N_e) is unitless)\n\n wis = sqrt_sum_wis(wavelength, flux, mask=mask, **kwargs)\n q = wis / np.sqrt(np.nansum(flux))\n return q.value",
"def compute_projmass(args):\n radius = args.radius/3600.0\n\n k_map = pyfits.open(args.kappa_map)\n k_data = k_map[0].data\n k_data_tmp = k_data\n\n pix_dim = math.fabs(k_map[0].header[\"CDELT1\"])\n pix_unit = k_map[0].header[\"CUNIT1\"]\n shape = k_map[0].data.shape\n\n x_axis = np.linspace(-(shape[0] - 1.0)/2.0*pix_dim , \\\n (shape[0] - 1.0)/2.0*pix_dim, shape[0])\n y_axis = np.linspace(-(shape[1] - 1.0)/2.0*pix_dim , \\\n (shape[1] - 1.0)/2.0*pix_dim, shape[1])\n\n if pix_unit != \"deg\":\n print \"Error, pixel unit not in deg\"\n if (x_axis.max() - x_axis.min())/2.0 < radius:\n print \"Error, the radius is larger than the image limits\"\n\n\n proj_mass = 0.0\n for i_x in range(shape[0]):\n for i_y in range(shape[1]):\n if x_axis[i_x]**2.0 + y_axis[i_y]**2.0 <= radius**2.0:\n #k_data_tmp[i_x][i_y] = 0.0\n proj_mass += k_data_tmp[i_x][i_y]\n\n print \"%e M_sol\" % (proj_mass*1E12)\n\n if args.plot_cont:\n circ = fc.make_circunference(radius*3600, 0, 0)\n plt.plot(circ[0], circ[1], \"k--\", linewidth = 2)\n plt.contour(x_axis*3600.0, y_axis*3600.0, k_data)\n plt.show()\n\n return proj_mass",
"def app_mag(abs_mag, phase_angle, slope_g, d_ast_sun, d_ast_earth):\n\n # Compute the apparent / visual magnitude\n mag = red_mag(abs_mag, phase_angle, slope_g) \\\n + 5.0 * np.log10(d_ast_sun * d_ast_earth)\n\n # Return the apparent magnitude\n return mag",
"def norm_bound(self, input_mags):\n return np.sum(input_mags)",
"def radial_profile(self,center,binsize,totalsize,pa=0,ratio=1):\n nsteps = int(totalsize/binsize)\n radii = np.zeros(nsteps)\n radial = np.zeros(nsteps)\n eradial = np.zeros(nsteps)\n binpix = binsize/self.xyscale\n megamask = np.zeros(self.size)\n for i in range(nsteps):\n inner,outer = i*binpix,(i+1)*binpix\n if i > 0:\n mask = anullarmask(self.image,center,self.size,inner,outer,pa=pa,ratio=ratio)\n else:\n mask = circmask(self.image,center,self.size,outer,pa=pa,ratio=ratio)\n megamask += mask\n avg = np.average(self.image,weights=mask)\n err = rms_masked(self.image,mask)\n radial[i] = avg\n eradial[i] = err\n radii[i] = (outer+inner)/2.\n\n self.radii = radii*self.xyscale\n self.radial = radial\n self.eradial = np.sqrt(eradial**2+self.noise**2)\n self.megamask = megamask\n return radii*self.xyscale,radial,eradial",
"def _test_get_set_minimal_bounding_sphere_radius(shape, centered=False):\n base_attr = \"minimal\" + (\"_centered_\" if centered else \"_\")\n sphere_type = \"circle\" if isinstance(shape, Shape2D) else \"sphere\"\n attr = base_attr + \"bounding_\" + sphere_type\n\n bounding_sphere = getattr(shape, attr)\n bounding_sphere_radius = getattr(shape, attr + \"_radius\")\n\n assert np.isclose(bounding_sphere_radius, bounding_sphere.radius)\n setattr(shape, attr + \"_radius\", bounding_sphere_radius * 2)\n assert np.isclose(getattr(shape, attr).radius, bounding_sphere_radius * 2)",
"def get_scale_parameter(self):\r\n \r\n if self.scale_parameter == 0.0: \r\n shape_in_gamma_func = float(1+(1/self.shape_parameter))\r\n gamma_func = special.gamma(shape_in_gamma_func)\r\n self.scale_parameter = (self.mean_fire_recurrence/gamma_func)\r\n return self.scale_parameter\r\n else:\r\n return self.scale_parameter",
"def eeg_rms(array, axis=0):\t\t\n\treturn np.sqrt(np.mean(array ** 2,axis))",
"def _template_sphere_disc(dim, outer_radius, inner_radius):\n rmax = np.array(outer_radius, ndmin=1)\n rmin = np.array(inner_radius, ndmin=1)\n ind = 2 * rmax - 1\n coord = np.indices((ind * np.ones(dim, dtype=int)))\n coord = coord - (ind - 1)/2\n x = coord[0, :]\n y = coord[1, :]\n if dim == 2:\n img = (x ** 2 + y ** 2) < rmax ** 2\n elif dim == 3:\n z = coord[2, :]\n img = (x ** 2 + y ** 2 + z ** 2) < rmax ** 2\n if rmin[0] != 0:\n if dim == 2:\n img_min = (x ** 2 + y ** 2) > rmin ** 2\n elif dim == 3:\n img_min = (x ** 2 + y ** 2 + z ** 2) > rmin ** 2\n img = img * img_min\n return img",
"def QuatMag(wxyz):\n return np.sqrt(np.sum(np.square(wxyz)))",
"def get_effective_radius(self, vel_disp, m_V):\n\t\tlog_vel_disp = np.log10(vel_disp)\n\t\tlog_R_eff = self.a*log_vel_disp + self.b*m_V + self.c + np.random.randn()*self.intrinsic_scatter\n\t\tR_eff = 10**log_R_eff\n\t\treturn R_eff",
"def spherefcn(x: np.ndarray) -> np.ndarray:\n if x.ndim == 1:\n x = x.reshape(-1, len(x))\n f = np.sum(x**2, axis=1)\n return f.reshape(-1, 1)"
] | [
"0.6397559",
"0.59061825",
"0.58994806",
"0.5894734",
"0.58598316",
"0.58598316",
"0.5819259",
"0.5739598",
"0.57345325",
"0.56258273",
"0.5618448",
"0.5549182",
"0.55437136",
"0.5513248",
"0.55029523",
"0.5493522",
"0.54327166",
"0.54285365",
"0.5425453",
"0.54176575",
"0.5415356",
"0.5405274",
"0.5397435",
"0.5390997",
"0.5390997",
"0.5389578",
"0.53839207",
"0.5378676",
"0.53710204",
"0.5360433",
"0.53593034",
"0.5348892",
"0.5348892",
"0.5347557",
"0.53394395",
"0.5328572",
"0.5321944",
"0.5321059",
"0.5316562",
"0.5313089",
"0.5306187",
"0.5302575",
"0.5299937",
"0.5294888",
"0.5283557",
"0.52675617",
"0.52580446",
"0.52549666",
"0.52408963",
"0.5240749",
"0.52267694",
"0.52192163",
"0.5207241",
"0.520052",
"0.5194818",
"0.51925534",
"0.51907027",
"0.51887834",
"0.5181062",
"0.5178709",
"0.51747",
"0.51628876",
"0.51484543",
"0.51462495",
"0.5131572",
"0.51275814",
"0.5117675",
"0.5117675",
"0.5109079",
"0.5098702",
"0.509074",
"0.5089861",
"0.5087553",
"0.50871706",
"0.50861776",
"0.5085914",
"0.5077483",
"0.506312",
"0.5057736",
"0.50502294",
"0.5048022",
"0.5044676",
"0.50443155",
"0.5041718",
"0.5031023",
"0.5027414",
"0.5022879",
"0.5017868",
"0.5015656",
"0.5014445",
"0.49992752",
"0.49935865",
"0.49903253",
"0.49891895",
"0.49841145",
"0.49826813",
"0.49661645",
"0.4965307",
"0.49617812",
"0.49616748"
] | 0.6352933 | 1 |
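A minimal, self-contained usage sketch for the spaxel-scale check in the next record, assuming the ELT constants that appear in other samples in this dump (ELT_DIAM = 39 m, MILIARCSECS_IN_A_RAD = 206265000); the call values at the end are illustrative only, not taken from the data.

    ELT_DIAM = 39                     # telescope diameter [m] (assumed, as in other samples)
    MILIARCSECS_IN_A_RAD = 206265000  # milliarcseconds per radian

    def check_spaxel_scale(rho_aper, wavelength):
        # rho_aper: relative aperture radius on the [-1, 1] pupil array
        # wavelength: in microns; the 1e-6 factor converts it to metres
        spaxel_rad = rho_aper * wavelength / ELT_DIAM * 1e-6
        spaxel_mas = spaxel_rad * MILIARCSECS_IN_A_RAD
        print('%.2f mas spaxels at %.2f microns' % (spaxel_mas, wavelength))

    check_spaxel_scale(rho_aper=0.15, wavelength=1.5)  # prints "1.19 mas spaxels at 1.50 microns"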
Checks the spaxel scale at a certain wavelength, for a given aperture radius defined for a [1, 1] physical array | def check_spaxel_scale(rho_aper, wavelength):
    SPAXEL_RAD = rho_aper * wavelength / ELT_DIAM * 1e-6  # relative aperture radius -> angular spaxel scale [rad]; 1e-6 converts microns to metres
    SPAXEL_MAS = SPAXEL_RAD * MILIARCSECS_IN_A_RAD  # radians -> milliarcseconds
print('%.2f mas spaxels at %.2f microns' %(SPAXEL_MAS, wavelength)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def spaxel_scale(scale=4, wave=1.0):\n\n scale_rad = scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wave * 1e-6)\n print(rho)",
"def rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.0):\n\n scale_rad = spaxel_scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wavelength * 1e-6)\n return rho",
"def guess_scaling(name, spectrum):\n spectra = '%s/disp/%s.1d.fits' % (name, zerocount(spectrum))\n skyname = '%s/sky.1d.fits' % name\n spectrafits = pyfits.open(spectra)\n skyfits = pyfits.open(skyname)\n scalings = []\n for line in LINES:\n spec_peak, spec_cont = get_peak_cont(spectrafits, line, 5)\n sky_peak, sky_cont = get_peak_cont(skyfits, line, 5)\n scale = ((spec_peak - spec_cont) / (sky_peak - sky_cont))\n scalings.append(scale)\n return avg(*scalings)",
"def powerlaw(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def analysis_function_rms_wfe(system, wavelength_idx, config, spaxels_per_slice, surface, pupil_sampling,\n remove_slicer=False):\n if config % 20 == 0:\n print(config)\n\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # [WARNING]: for the 4x4 spaxel scale we noticed that a significant fraction of the rays get vignetted at the slicer\n # this introduces a bias in the RMS WFE calculation. To avoid this, we modify the Image Slicer aperture definition\n # so that all rays get through. Consequently, enough pupil rays are traced to get an unbiased estimation of RMS WFE\n if remove_slicer is True:\n\n expand_slicer_aperture(system)\n\n # [1] Some housekeeping and pre-processing operations\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n # Problem with the MC files. Before, all the E2E files had only 3 fields, now there's more, some spurious ones\n # So N_fields is no longer 3. Let's just hardcode the value to 3 temporarily\n # N_fields = sysField.NumberOfFields\n N_fields = 3\n N_waves = len(wavelength_idx)\n N_rays = N_waves * spaxels_per_slice\n\n # The only valid Field Points we should care about are 1-3 as defined by Matthias\n # The default Field Point definition of the E2E files is 1 & 3 are the edges of the slice, 2 is the centre\n fx_min, fy_min = sysField.GetField(1).X, sysField.GetField(1).Y\n fx_max, fy_max = sysField.GetField(3).X, sysField.GetField(3).Y\n\n # Note that this assumes Rectangular Normalization, the default in the E2E files.\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(N_fields)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(N_fields)])\n\n # Normalized field coordinates (hx, hy)\n hx_min, hx_max = fx_min / X_MAX, fx_max / X_MAX\n hy_min, hy_max = fy_min / Y_MAX, fy_max / Y_MAX\n\n # Sample between the edges of the slice as given by \"spaxels_per_slice\" to include as many points as we want\n hx = np.linspace(hx_min, hx_max, spaxels_per_slice)\n hy = np.linspace(hy_min, hy_max, spaxels_per_slice)\n\n # The useful data that we'll store\n obj_xy = np.array([X_MAX * hx, Y_MAX * hy]).T # The Field coordinates for the Object plane\n RMS_WFE = np.empty((N_waves, spaxels_per_slice)) # The RMS WFE results\n foc_xy = np.empty((N_waves, spaxels_per_slice, 2)) # The Chief Ray coordinates at the Detector\n\n # [2] This is where the core of the RMS WFE calculation takes place\n # First, we begin by defining the Raytrace\n raytrace = system.Tools.OpenBatchRayTrace()\n normUnPolData = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, surface)\n\n # Start creating the Merit Function\n theMFE = system.MFE\n\n # Clear any operands that could be left from the E2E files\n nops = theMFE.NumberOfOperands\n theMFE.RemoveOperandsAt(1, nops)\n\n # Build the Merit Function\n # Set first operand to current configuration\n op = theMFE.GetOperandAt(1)\n op.ChangeType(constants.MeritOperandType_CONF)\n op.GetOperandCell(constants.MeritColumn_Param1).Value = config\n wfe_op = constants.MeritOperandType_RWRE # The Type of RMS WFE Operand: RWRE rectangular\n\n # Populate the Merit Function with RMS WFE Operands\n # Loop over the wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n # Loop over all Spaxels in the Slice\n for j_field, (h_x, h_y) in enumerate(zip(hx, hy)):\n\n op = theMFE.AddOperand()\n op.ChangeType(wfe_op)\n op.GetOperandCell(constants.MeritColumn_Param1).Value = int(pupil_sampling)\n op.GetOperandCell(constants.MeritColumn_Param2).Value = 
int(wave_idx)\n op.GetOperandCell(constants.MeritColumn_Param3).Value = float(h_x)\n op.GetOperandCell(constants.MeritColumn_Param4).Value = float(h_y)\n op.GetOperandCell(constants.MeritColumn_Weight).Value = 0\n\n # Take advantage of the loop to simultaneously add the ray to the RayTrace\n normUnPolData.AddRay(wave_idx, h_x, h_y, 0, 0, constants.OPDMode_None)\n\n # time_1 = time() - start0\n # print(\"\\nTime spent setting up MF and Raytrace: %.3f sec\" % time_1)\n # start = time()\n\n # update the Merit Function\n theMFE.CalculateMeritFunction()\n # time_mf = time() - start\n # print(\"Time spent updating MF: %.3f sec\" % time_mf)\n\n # start = time()\n # Run the RayTrace for the whole Slice\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n # time_ray = time() - start\n # print(\"Time spent running Raytrace: %.3f sec\" % time_ray)\n\n # start = time()\n # [3] Time to start reading the results of the RMS WFE Operands + Raytrace coordinates\n normUnPolData.StartReadingResults()\n # Loop over the wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n # Loop over all Spaxels in the Slice\n for j_field, (h_x, h_y) in enumerate(zip(hx, hy)):\n\n # Calculate the Row index we need to get the Operand\n irow = 2 + i_wave * spaxels_per_slice + j_field\n # print(irow)\n\n op = theMFE.GetOperandAt(irow)\n\n # print(op.GetOperandCell(constants.MeritColumn_Param1).Value)\n # print(op.GetOperandCell(constants.MeritColumn_Param2).Value)\n # print(op.GetOperandCell(constants.MeritColumn_Param3).Value)\n # print(op.GetOperandCell(constants.MeritColumn_Param4).Value)\n rms = op.Value\n\n wavelength = system.SystemData.Wavelengths.GetWavelength(wave_idx).Wavelength\n\n RMS_WFE[i_wave, j_field] = wavelength * 1e3 * rms # We assume the Wavelength comes in Microns\n\n # If we get an RMS value of 0.0, print the data so we can double check the Zemax file\n # This is bad news and it mean the Rays are being vignetted somewhere\n if RMS_WFE[i_wave, j_field] == 0.0:\n print(\"\\nConfig #%d | Wave #%d | Field #%d\" % (config, wave_idx, j_field + 1))\n # raise ValueError\n\n output = normUnPolData.ReadNextResult()\n if output[2] == 0:\n x, y = output[4], output[5]\n foc_xy[i_wave, j_field, 0] = x\n foc_xy[i_wave, j_field, 1] = y\n\n vignetting_code = output[3]\n if vignetting_code != 0:\n vignetting_surface = system.LDE.GetSurfaceAt(vignetting_code).Comment\n # print(\"\\nConfig #%d\" % (config))\n # print(\"Vignetting at surface #%d: %s\" % (vignetting_code, vignetting_surface))\n # if config == 1:\n # raise ValueError\n\n normUnPolData.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n # time_res = time() - start\n # print(\"Time spent reading results: %.3f sec\" % time_res)\n\n # time_total = time() - start0\n # print(\"TOTAL Time: %.3f sec\" % time_total)\n # sec_per_wave = time_total / N_waves * 1000\n # print(\"%3.f millisec per Wavelength\" % sec_per_wave)\n\n return [RMS_WFE, obj_xy, foc_xy]",
"def robust_scale(X, *, axis=..., with_centering=..., with_scaling=..., quantile_range=..., copy=..., unit_variance=...):\n ...",
"def apply_spectral_radius(w,spectral_radius):\n assert len(w.shape)==2 and w.shape[0]==w.shape[1],\\\n \"Error: apply_spectral_radius must receive 'w' as a square matrix.\"\n\n new_w = np.array(w)\n spectral_radius_w = calc_spectral_radius(w)\n if spectral_radius_w > 0.0:\n new_w = (w / spectral_radius_w) * spectral_radius\n else:\n print(\"Warning: Spectral radius of 'w' is zero (because of small size). Therefore, spectral radius does not changed.\")\n\n return new_w",
"def any_scale(scale):\n return scale",
"def param_scale_check(shape_x, shape_scale):\n\n length_x = len(shape_x)\n length_scale = len(shape_scale)\n\n if not(length_scale == 1 and shape_scale[0] == 1):\n if length_x != length_scale:\n raise RuntimeError(\n \"length_x and length_scale must be equal\")\n for i in range(length_scale):\n if shape_scale[i] != shape_x[i] and shape_scale[i] != 1:\n raise RuntimeError(\n \"shape_scale is not match to broadcast\")",
"def scale_mag_1(x):\n return np.array([np.true_divide(ui, mag(x)) for ui in x])",
"def norm_spectra(spectra, add_infinity=True):\n from scipy import interpolate\n start_n=np.array([3770.,3796.,3835.,3895.,3995.,4130.,4490.,4620.,5070.,5200.,\n 6000.,7000.,7550.,8400.])\n end_n=np.array([3795.,3830.,3885.,3960.,4075.,4290.,4570.,4670.,5100.,5300.,\n 6100.,7050.,7600.,8450.])\n n_range_s=np.array(['P','P','P','P','P','P','M','M','M','M','M','M','M','M'])\n if len(spectra[0])>2:\n snr = np.zeros([len(start_n),3])\n spectra[:,2][spectra[:,2]==0.] = spectra[:,2].max()\n else: \n snr = np.zeros([len(start_n),2])\n wav = spectra[:,0]\n for j in range(len(start_n)):\n if (start_n[j] < wav.max()) & (end_n[j] > wav.min()):\n _s = spectra[(wav>=start_n[j])&(wav<=end_n[j])]\n _w = _s[:,0]\n #Avoids gappy spectra\n k=3 # Check if there are more points than 3\n if len(_s)>k:\n #interpolate onto 10* resolution\n l = np.linspace(_w.min(),_w.max(),(len(_s)-1)*10+1)\n if len(spectra[0])>2:\n tck = interpolate.splrep(_w,_s[:,1],w=1/_s[:,2], s=1000)\n #median errors for max/mid point\n snr[j,2] = np.median(_s[:,2]) / np.sqrt(len(_w))\n else: tck = interpolate.splrep(_w,_s[:,1],s=0.0)\n f = interpolate.splev(l,tck)\n #find maxima and save\n if n_range_s[j]=='P': snr[j,0], snr[j,1] = l[f==f.max()][0], f.max()\n #find mean and save\n elif n_range_s[j]=='M': snr[j,0:2] = np.mean(l), np.mean(f)\n else: print('Unknown n_range_s, ignoring')\n snr = snr[ snr[:,0] != 0 ]\n #t parameter chosen by eye. Position of knots.\n if snr[:,0].max() < 6460: knots = [3000,4900,4100,4340,4860,int(snr[:,0].max()-5)]\n else: knots = [3885,4340,4900,6460]\n if snr[:,0].min() > 3885:\n print('Warning: knots used for spline norm unsuitable for high order fitting')\n knots=knots[1:]\n if (snr[:,0].min() > 4340) or (snr[:,0].max() < 4901): \n knots=None # 'Warning: knots used probably bad'\n if add_infinity: # Adds points at inf & 0 for spline to fit to err = mean(spec err)\n if snr.shape[1] > 2:\n mean_snr = np.mean(snr[:,2])\n snr = np.vstack([ snr, np.array([90000. ,0., mean_snr ]) ])\n snr = np.vstack([ snr, np.array([100000.,0., mean_snr ]) ])\n else:\n snr = np.vstack([ snr, np.array([90000.,0.]) ])\n snr = np.vstack([ snr, np.array([100000.,0.]) ])\n try: #weight by errors\n if len(spectra[0])>2: \n tck = interpolate.splrep(snr[:,0],snr[:,1], w=1/snr[:,2], t=knots, k=3)\n else: tck = interpolate.splrep(snr[:,0],snr[:,1], t=knots, k=3)\n except ValueError:\n knots=None\n if len(spectra[0])>2: \n tck = interpolate.splrep(snr[:,0],snr[:,1], w=1/snr[:,2], t=knots, k=3)\n else: tck = interpolate.splrep(snr[:,0],snr[:,1], t=knots, k=3)\n cont_flux = interpolate.splev(wav,tck).reshape(wav.size, 1)\n spectra_ret = np.copy(spectra)\n spectra_ret[:,1:] = spectra_ret[:,1:]/cont_flux\n return spectra_ret, cont_flux",
"def scale_sky_spectrum(wlm, sky_spectrum, spectra, cut_sky=4., fmax=10, fmin=1, valid_wave_min=0, valid_wave_max=0, \n fibre_list=[100,200,300,400,500,600,700,800,900], plot=True, verbose=True, warnings=True): \n \n# # Read sky lines provided by 2dFdr\n# sky_line_,flux_sky_line_ = read_table(\"sky_lines_2dfdr.dat\", [\"f\", \"f\"] )\n# # Choose those lines in the range\n# sky_line=[]\n# flux_sky_line=[]\n# valid_wave_min = 6240\n# valid_wave_max = 7355\n# for i in range(len(sky_line_)):\n# if valid_wave_min < sky_line_[i] < valid_wave_max:\n# sky_line.append(sky_line_[i])\n# flux_sky_line.append(flux_sky_line_[i])\n \n \n if valid_wave_min == 0: valid_wave_min = wlm[0]\n if valid_wave_max == 0: valid_wave_max = wlm[-1]\n \n if verbose: print(\"\\n> Identifying sky lines using cut_sky =\",cut_sky,\", allowed SKY/OBJ values = [\",fmin,\",\",fmax,\"]\")\n if verbose: print(\" Using fibres = \",fibre_list)\n\n peaks,peaks_name,peaks_rest,continuum_limits=search_peaks(wlm,sky_spectrum, plot=plot, cut=cut_sky, fmax=fmax, only_id_lines=False, verbose=False) \n\n ratio_list=[]\n valid_peaks=[]\n \n if verbose: print(\"\\n Sky line Gaussian ratio Flux ratio\")\n n_sky_lines_found=0\n for i in range(len(peaks)):\n sky_spectrum_data=fluxes(wlm,sky_spectrum, peaks[i], fcal=False, lowlow=50,highhigh=50, plot=False, verbose=False, warnings=False)\n \n sky_median_continuum = np.nanmedian(sky_spectrum_data[11])\n \n object_spectrum_data_gauss=[]\n object_spectrum_data_integrated=[] \n median_list=[]\n for fibre in fibre_list: \n object_spectrum_flux=fluxes(wlm, spectra[fibre], peaks[i], fcal=False, lowlow=50,highhigh=50, plot=False, verbose=False, warnings=False)\n object_spectrum_data_gauss.append(object_spectrum_flux[3]) # Gaussian flux is 3\n object_spectrum_data_integrated.append(object_spectrum_flux[7]) # integrated flux is 7\n median_list.append(np.nanmedian(object_spectrum_flux[11]))\n object_spectrum_data=np.nanmedian(object_spectrum_data_gauss)\n object_spectrum_data_i=np.nanmedian(object_spectrum_data_integrated)\n \n object_median_continuum=np.nanmin(median_list) \n \n if fmin < object_spectrum_data/sky_spectrum_data[3] * sky_median_continuum/object_median_continuum < fmax :\n n_sky_lines_found = n_sky_lines_found + 1\n valid_peaks.append(peaks[i])\n ratio_list.append(object_spectrum_data/sky_spectrum_data[3])\n if verbose: print(\"{:3.0f} {:5.3f} {:2.3f} {:2.3f}\".format(n_sky_lines_found,peaks[i],object_spectrum_data/sky_spectrum_data[3], object_spectrum_data_i/sky_spectrum_data[7])) \n\n\n #print \"ratio_list =\", ratio_list\n #fit = np.polyfit(valid_peaks, ratio_list, 0) # This is the same that doing an average/mean\n #fit_line = fit[0]+0*wlm\n fit_line =np.nanmedian(ratio_list) # We just do a median\n #fit_line = fit[1]+fit[0]*wlm\n #fit_line = fit[2]+fit[1]*wlm+fit[0]*wlm**2\n #fit_line = fit[3]+fit[2]*wlm+fit[1]*wlm**2+fit[0]*wlm**3\n \n \n if plot:\n plt.plot(valid_peaks,ratio_list,\"+\")\n #plt.plot(wlm,fit_line)\n plt.axhline(y=fit_line, color='k', linestyle='--')\n plt.xlim(valid_wave_min-10, valid_wave_max+10) \n #if len(ratio_list) > 0:\n plt.ylim(np.nanmin(ratio_list)-0.2,np.nanmax(ratio_list)+0.2)\n plt.title(\"Scaling sky spectrum to object spectra\")\n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n plt.ylabel(\"OBJECT / SKY\")\n plt.minorticks_on()\n plt.show()\n plt.close()\n \n if verbose: print(\" Using this fit to scale sky spectrum to object, the median value is \",np.round(fit_line,3),\"...\") \n \n sky_corrected = sky_spectrum * fit_line\n\n# plt.plot(wlm,sky_spectrum, 
\"r\", alpha=0.3)\n# plt.plot(wlm,sky_corrected, \"g\", alpha=0.3)\n# plt.show()\n# plt.close()\n \n return sky_corrected, np.round(fit_line,3)",
"def test_scale_value(make_rampmodel):\n\n datmod = make_rampmodel(2, 2, 4, 2048, 2048)\n\n # Calculate the scale based off of the input.\n scale = datmod.meta.exposure.frame_divisor / datmod.meta.exposure.nframes\n\n output = GroupScaleStep.call(datmod)\n\n scale_from_data = np.unique(output.data / datmod.data)\n\n # Since the scale value is applied uniformly to the array, if we divide the output\n # by the input then we should get a single unique value (ie the scale) calculated\n # by the pipeline.\n assert len(scale_from_data) == 1\n\n # Make sure the scale calculated manually from the data model above matched what the\n # pipeline calculated.\n assert scale == scale_from_data[0]",
"def Fitzpactrick09(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def test_constructed_is_small(self):\n self.assertTrue(all(elt<10 for elt in goodwinsheaf.checkradii()))#check all entries have small radii",
"def Schlafly16(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def quality(\n wavelength: Union[Quantity, ndarray],\n flux: Union[Quantity, ndarray],\n mask: Optional[ndarray] = None,\n **kwargs,\n) -> float:\n flux = flux * u.dimensionless_unscaled # Turn into Quantity if not already\n flux = flux / flux.unit # Remove units from flux (sqrt(N_e) is unitless)\n\n wis = sqrt_sum_wis(wavelength, flux, mask=mask, **kwargs)\n q = wis / np.sqrt(np.nansum(flux))\n return q.value",
"def Fritz11(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def _scale_param(self, resid_us):\n return((resid_us**2).sum().sum() / self.dof)",
"def fluxes(wavelength, s, line, lowlow= 14, lowhigh=6, highlow=6, highhigh = 14, lmin=0, lmax=0, fmin=0, fmax=0, \n broad=2.355, plot=True, verbose=True, plot_sus = False, fcal = True, fit_continuum = True, median_kernel=35, warnings = True ): # Broad is FWHM for Gaussian sigma= 1,\n # s must be an array, no a list\n try: \n index_maximo_del_rango = s.tolist().index(np.nanmax(s))\n #print \" is AN ARRAY\"\n except Exception:\n #print \" s is A LIST -> must be converted into an ARRAY\" \n s = np.array(s)\n \n # Setup wavelength limits\n if lmin == 0 :\n lmin = line-65. # By default, +-65 A with respect to line\n if lmax == 0 :\n lmax = line+65.\n \n # Extract subrange to fit\n w_spec = []\n f_spec = []\n w_spec.extend((wavelength[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n f_spec.extend((s[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n \n if np.isnan(np.nanmedian(f_spec)): \n # The data are NAN!! Nothing to do\n if verbose or warnings: print(\" There is no valid data in the wavelength range [{},{}] !!\".format(lmin,lmax))\n \n resultado = [0, line, 0, 0, 0, 0, 0, 0, 0, 0, 0, s ] \n\n return resultado\n \n else: \n \n ## 20 Sep 2020\n f_spec_m=signal.medfilt(f_spec,median_kernel) # median_kernel = 35 default\n \n \n # Remove nans\n median_value = np.nanmedian(f_spec)\n f_spec = [median_value if np.isnan(x) else x for x in f_spec] \n \n \n # Setup min and max flux values in subrange to fit\n if fmin == 0 :\n fmin = np.nanmin(f_spec) \n if fmax == 0 :\n fmax = np.nanmax(f_spec) \n \n # We have to find some \"guess numbers\" for the Gaussian. Now guess_centre is line\n guess_centre = line\n \n # Define continuum regions: [-lowlow, -lowhigh] and [highlow,highhigh] in Angstroms with respect to guess_centre\n \n w_cont=[]\n f_cont=[]\n w_cont.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) or (w_spec[i] > guess_centre+highlow and w_spec[i] < guess_centre+highhigh) ) \n f_cont.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) or (w_spec[i] > guess_centre+highlow and w_spec[i] < guess_centre+highhigh) ) \n \n if fit_continuum:\n # Linear Fit to continuum \n f_cont_filtered=sig.medfilt(f_cont,np.int(median_kernel))\n #print line #f_cont\n # if line == 8465.0:\n # print w_cont\n # print f_cont_filtered\n # plt.plot(w_cont,f_cont_filtered)\n # plt.show()\n # plt.close()\n # warnings=True\n try: \n mm,bb = np.polyfit(w_cont, f_cont_filtered, 1)\n except Exception:\n bb = np.nanmedian(f_cont_filtered)\n mm = 0.\n if warnings: \n print(\" WARNING: Impossible to get the continuum!\")\n print(\" Scaling the continuum to the median value b = \",bb,\": cont = 0 * w_spec + \", bb)\n continuum = mm*np.array(w_spec)+bb \n c_cont = mm*np.array(w_cont)+bb \n \n else: \n # Median value in each continuum range # NEW 15 Sep 2019\n w_cont_low = []\n f_cont_low = []\n w_cont_low.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) ) \n f_cont_low.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) ) \n median_w_cont_low = np.nanmedian(w_cont_low)\n median_f_cont_low = np.nanmedian(f_cont_low)\n w_cont_high = []\n f_cont_high = []\n w_cont_high.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre+highlow and w_spec[i] < 
guess_centre+highhigh) ) \n f_cont_high.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre+highlow and w_spec[i] < guess_centre+highhigh) ) \n median_w_cont_high = np.nanmedian(w_cont_high)\n median_f_cont_high = np.nanmedian(f_cont_high) \n \n b = (median_f_cont_low-median_f_cont_high)/(median_w_cont_low-median_w_cont_high)\n a = median_f_cont_low- b * median_w_cont_low\n \n continuum = a + b*np.array(w_spec)\n c_cont = a + b*np.array(w_cont) \n \n \n # rms continuum\n rms_cont = np.nansum([ np.abs(f_cont[i] - c_cont[i]) for i in range(len(w_cont)) ]) / len(c_cont)\n \n # Search for index here w_spec(index) closest to line\n min_w = np.abs(np.array(w_spec)-line)\n mini = np.nanmin(min_w)\n # guess_peak = f_spec[min_w.tolist().index(mini)] # WE HAVE TO SUSTRACT CONTINUUM!!!\n guess_peak = f_spec[min_w.tolist().index(mini)] - continuum[min_w.tolist().index(mini)]\n \n # LOW limit\n low_limit=0\n w_fit = []\n f_fit = []\n w_fit.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-15 and w_spec[i] < guess_centre)) \n f_fit.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-15 and w_spec[i] < guess_centre)) \n if fit_continuum: \n c_fit=mm*np.array(w_fit)+bb \n else: \n c_fit=b*np.array(w_fit)+a \n \n fs=[]\n ws=[]\n for ii in range(len(w_fit)-1,1,-1):\n if f_fit[ii]/c_fit[ii] < 1.05 and f_fit[ii-1]/c_fit[ii-1] < 1.05 and low_limit == 0: low_limit = w_fit[ii]\n # if f_fit[ii]/c_fit[ii] < 1.05 and low_limit == 0: low_limit = w_fit[ii]\n fs.append(f_fit[ii]/c_fit[ii])\n ws.append(w_fit[ii])\n if low_limit == 0: \n sorted_by_flux=np.argsort(fs)\n try:\n low_limit = ws[sorted_by_flux[0]]\n except Exception:\n plot=True\n low_limit = 0\n \n # HIGH LIMIT \n high_limit=0\n w_fit = []\n f_fit = []\n w_fit.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre and w_spec[i] < guess_centre+15)) \n f_fit.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre and w_spec[i] < guess_centre+15)) \n if fit_continuum: \n c_fit=mm*np.array(w_fit)+bb \n else: \n c_fit=b*np.array(w_fit)+a\n \n fs=[]\n ws=[]\n for ii in range(len(w_fit)-1):\n if f_fit[ii]/c_fit[ii] < 1.05 and f_fit[ii+1]/c_fit[ii+1] < 1.05 and high_limit == 0: high_limit = w_fit[ii]\n # if f_fit[ii]/c_fit[ii] < 1.05 and high_limit == 0: high_limit = w_fit[ii]\n fs.append(f_fit[ii]/c_fit[ii])\n ws.append(w_fit[ii])\n if high_limit == 0: \n sorted_by_flux=np.argsort(fs)\n try:\n high_limit = ws[sorted_by_flux[0]] \n except Exception:\n plot=True\n high_limit = 0 \n \n # Guess centre will be the highest value in the range defined by [low_limit,high_limit]\n \n try: \n rango = np.where((high_limit >= wavelength ) & (low_limit <= wavelength)) \n index_maximo_del_rango = s.tolist().index(np.nanmax(s[rango]))\n guess_centre = wavelength[index_maximo_del_rango]\n except Exception:\n guess_centre = line #### It was 0 before\n \n \n # Fit a Gaussian to data - continuum \n p0 = [guess_centre, guess_peak, broad/2.355] # broad is the Gaussian sigma, 1.0 for emission lines\n try:\n fit, pcov = curve_fit(gauss, w_spec, f_spec-continuum, p0=p0, maxfev=10000) # If this fails, increase maxfev...\n fit_error = np.sqrt(np.diag(pcov))\n \n # New 28th Feb 2019: Check central value between low_limit and high_limit\n # Better: between guess_centre - broad, guess_centre + broad\n # If not, redo fit fixing central value to the peak (it does not work... 
just fix FWHM= (high_limit-low_limit)/2.5 )\n \n if verbose != False: print(\" ----------------------------------------------------------------------------------------\")\n # if low_limit < fit[0] < high_limit:\n if fit[0] < guess_centre - broad or fit[0] > guess_centre + broad:\n # if verbose: print \" Fitted center wavelength\", fit[0],\"is NOT in the range [\",low_limit,\",\",high_limit,\"]\"\n if verbose: print(\" Fitted center wavelength\", fit[0],\"is NOT in the expected range [\",guess_centre - broad,\",\",guess_centre + broad,\"]\")\n \n # print \"Re-do fitting fixing center wavelength\"\n # p01 = [guess_peak, broad]\n # fit1, pcov1 = curve_fit(gauss_fix_x0, w_spec, f_spec-continuum, p0=p01, maxfev=100000) # If this fails, increase maxfev...\n # fit_error1 = np.sqrt(np.diag(pcov1))\n # fit[0]=guess_centre\n # fit_error[0] = 0.\n # fit[1] = fit1[0]\n # fit_error[1] = fit_error1[0]\n # fit[2] = fit1[1]\n # fit_error[2] = fit_error1[1] \n \n fit[0]=guess_centre\n fit_error[0] = 0.000001\n fit[1]=guess_peak\n fit_error[1] = 0.000001\n fit[2] = broad/2.355\n fit_error[2] = 0.000001 \n else:\n if verbose: print(\" Fitted center wavelength\", fit[0],\"IS in the expected range [\",guess_centre - broad,\",\",guess_centre + broad,\"]\")\n \n \n if verbose: print(\" Fit parameters = \", fit[0], fit[1], fit[2])\n if fit[2] == broad and warnings == True : \n print(\" WARNING: Fit in\",fit[0],\"failed! Using given centre wavelength (cw), peak at (cv) & sigma = broad/2.355 given.\") \n gaussian_fit = gauss(w_spec, fit[0], fit[1], fit[2])\n \n \n # Estimate rms of the Gaussian fit in range [low_limit, high_limit]\n residuals = f_spec-gaussian_fit-continuum\n rms_fit = np.nansum([ ((residuals[i]**2)/(len(residuals)-2))**0.5 for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n \n # Fluxes, FWHM and Eq. 
Width calculations\n gaussian_flux = gauss_flux(fit[1],fit[2])\n error1 = np.abs(gauss_flux(fit[1]+fit_error[1],fit[2]) - gaussian_flux)\n error2 = np.abs(gauss_flux(fit[1],fit[2]+fit_error[2]) - gaussian_flux)\n gaussian_flux_error = 1 / ( 1/error1**2 + 1/error2**2 )**0.5\n \n \n fwhm=fit[2]*2.355\n fwhm_error = fit_error[2] *2.355\n fwhm_vel = fwhm / fit[0] * C \n fwhm_vel_error = fwhm_error / fit[0] * C \n \n gaussian_ew = gaussian_flux/np.nanmedian(f_cont)\n gaussian_ew_error = gaussian_ew * gaussian_flux_error/gaussian_flux \n \n # Integrated flux\n # IRAF: flux = sum ((I(i)-C(i)) * (w(i2) - w(i1)) / (i2 - i2) \n flux = np.nansum([ (f_spec[i]-continuum[i])*(w_spec[i+1]-w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n flux_error = rms_cont * (high_limit - low_limit)\n wave_resolution = (wavelength[-1]-wavelength[0])/len(wavelength)\n ew = wave_resolution * np.nansum ([ (1 - f_spec[i]/continuum[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n ew_error = np.abs(ew*flux_error/flux) \n gauss_to_integrated = gaussian_flux/flux * 100.\n \n index=0\n s_s=np.zeros_like(s)\n for wave in range(len(wavelength)):\n s_s[wave]=s[wave]\n if wavelength[wave] == w_spec[0] : \n s_s[wave] = f_spec[0]-gaussian_fit[0]\n index=1\n if wavelength[wave] > w_spec[0] and wavelength[wave] <= w_spec[-1]:\n s_s[wave] = f_spec[index]-gaussian_fit[index]\n index=index+1\n \n # Plotting \n ptitle = 'Fit: x0=%.2f y0=%.2e sigma=%.2f flux=%.2e rms=%.3e' % (fit[0], fit[1], fit[2], gaussian_flux, rms_fit)\n if plot :\n plt.figure(figsize=(10, 4))\n # Plot input spectrum\n plt.plot(np.array(w_spec),np.array(f_spec), \"b\", lw=3, alpha = 0.8)\n # Plot median input spectrum\n plt.plot(np.array(w_spec),np.array(f_spec_m), \"orange\", lw=3, alpha = 0.5) # 2021: era \"g\"\n # Plot spectrum - gauss subtracted\n plt.plot(wavelength,s_s,\"g\",lw=3, alpha = 0.6)\n \n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$ ]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\")\n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n \n # Vertical line at guess_centre\n plt.axvline(x=guess_centre, color='r', linestyle='-', alpha=0.3)\n # Horizontal line at y = 0\n plt.axhline(y=0, color='k', linestyle=':', alpha=0.5) \n # Dashed green regions for continuum, defined by [lowlow, lowhigh] and [highlow,highhigh]\n plt.axvspan(guess_centre+highlow, guess_centre+highhigh, facecolor='g', alpha=0.15,zorder=3)\n plt.axvspan(guess_centre-lowlow, guess_centre-lowhigh, facecolor='g', alpha=0.15,zorder=3)\n # Plot linear fit for continuum\n plt.plot(w_spec, continuum,\"g--\")\n # Plot Gaussian fit \n plt.plot(w_spec, gaussian_fit+continuum, 'r-', alpha=0.8) \n # Vertical line at Gaussian center\n plt.axvline(x=fit[0], color='k', linestyle='-', alpha=0.5)\n # Vertical lines to emission line\n plt.axvline(x= low_limit, color='k', linestyle=':', alpha=0.5)\n plt.axvline(x= high_limit, color='k', linestyle=':', alpha=0.5) \n # Plot residuals\n plt.plot(w_spec, residuals, 'k')\n plt.title(ptitle)\n plt.show()\n \n # Printing results\n if verbose :\n print(\"\\n - Gauss and continuum fitting + integrated flux calculations:\\n\")\n print(\" rms continuum = %.3e erg/cm/s/A \" % (rms_cont)) \n print(\" Gaussian Fit parameters: x0 = ( %.2f +- %.2f ) A \" % (fit[0], fit_error[0]))\n print(\" y0 = ( %.3f +- %.3f ) 1E-16 erg/cm2/s/A\" % (fit[1]/1E-16, fit_error[1]/1E-16 ))\n print(\" sigma = ( 
%.3f +- %.3f ) A\" % (fit[2], fit_error[2])) \n print(\" rms fit = %.3e erg/cm2/s/A\" % (rms_fit))\n print(\" Gaussian Flux = ( %.2f +- %.2f ) 1E-16 erg/s/cm2 (error = %.1f per cent)\" % (gaussian_flux/1E-16, gaussian_flux_error/1E-16, gaussian_flux_error/gaussian_flux*100))\n print(\" FWHM = ( %.3f +- %.3f ) A = ( %.1f +- %.1f ) km/s \" % (fwhm, fwhm_error, fwhm_vel, fwhm_vel_error))\n print(\" Eq. Width = ( %.1f +- %.1f ) A\" % (-gaussian_ew, gaussian_ew_error)) \n print(\"\\n Integrated flux = ( %.2f +- %.2f ) 1E-16 erg/s/cm2 (error = %.1f per cent) \" % ( flux/1E-16, flux_error/1E-16, flux_error/flux *100)) \n print(\" Eq. Width = ( %.1f +- %.1f ) A\" % (ew, ew_error))\n print(\" Gauss/Integrated = %.2f per cent \" % gauss_to_integrated)\n \n \n # Plot independent figure with substraction if requested \n if plot_sus: plot_plot(wavelength,[s,s_s], xmin=lmin, xmax=lmax, ymin=fmin, ymax=fmax, fcal=fcal, frameon=True, ptitle=ptitle)\n \n # 0 1 2 3 4 5 6 7 8 9 10 11\n resultado = [rms_cont, fit[0], fit_error[0], gaussian_flux, gaussian_flux_error, fwhm, fwhm_error, flux, flux_error, ew, ew_error, s_s ]\n return resultado \n except Exception:\n if verbose: \n print(\" - Gaussian fit failed!\")\n print(\" However, we can compute the integrated flux and the equivalent width:\")\n \n flux = np.nansum([ (f_spec[i]-continuum[i])*(w_spec[i+1]-w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n flux_error = rms_cont * (high_limit - low_limit)\n wave_resolution = (wavelength[-1]-wavelength[0])/len(wavelength)\n ew = wave_resolution * np.nansum ([ (1 - f_spec[i]/continuum[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n ew_error = np.abs(ew*flux_error/flux) \n \n if verbose:\n print(\" Integrated flux = ( %.2f +- %.2f ) 1E-16 erg/s/cm2 (error = %.1f per cent) \" % ( flux/1E-16, flux_error/1E-16, flux_error/flux *100)) \n print(\" Eq. 
Width = ( %.1f +- %.1f ) A\" % (ew, ew_error))\n \n resultado = [0, guess_centre, 0, 0, 0, 0, 0, flux, flux_error, ew, ew_error, s ] # guess_centre was identified at maximum value in the [low_limit,high_limit] range but Gaussian fit failed\n \n \n # Plotting \n if plot :\n plt.figure(figsize=(10, 4))\n plt.plot(np.array(w_spec),np.array(f_spec), \"b\", lw=3, alpha = 0.5)\n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\") \n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n \n # Vertical line at guess_centre\n plt.axvline(x=guess_centre, color='r', linestyle='-', alpha=0.5)\n # Horizontal line at y = 0\n plt.axhline(y=0, color='k', linestyle=':', alpha=0.5) \n # Dashed green regions for continuum, defined by [lowlow, lowhigh] and [highlow,highhigh]\n plt.axvspan(guess_centre+highlow, guess_centre+highhigh, facecolor='g', alpha=0.15,zorder=3)\n plt.axvspan(guess_centre-lowlow, guess_centre-lowhigh, facecolor='g', alpha=0.15,zorder=3)\n # Plot linear fit for continuum\n plt.plot(w_spec, continuum,\"g--\")\n # Plot Gaussian fit \n # plt.plot(w_spec, gaussian_fit+continuum, 'r-', alpha=0.8) \n # Vertical line at Gaussian center\n # plt.axvline(x=fit[0], color='k', linestyle='-', alpha=0.5)\n # Vertical lines to emission line\n plt.axvline(x= low_limit, color='k', linestyle=':', alpha=0.5)\n plt.axvline(x= high_limit, color='k', linestyle=':', alpha=0.5) \n # Plot residuals\n # plt.plot(w_spec, residuals, 'k')\n plt.title(\"No Gaussian fit obtained...\")\n plt.show()\n \n \n return resultado",
"def get_surfaceflux_from_wavelength_and_laser_power(wavelength, rover_specs, laser_powers, receiver_areas,\n power_reqs, pointing_error=[1e-7, 1e-7]):\n assert len(power_reqs) == len(receiver_areas)\n assert len(power_reqs) == len(rover_specs)\n\n # Set the parameter space\n trans_radius = np.logspace(-3, 1, 1000)\n altitudes = np.logspace(4, 7, 1001)\n R, Z = np.meshgrid(trans_radius, altitudes, indexing=\"ij\")\n\n fig, ax = plt.subplots(len(power_reqs), len(laser_powers), sharey=True, sharex=True, figsize=(12, 7))\n for i, laser_power in enumerate(laser_powers):\n for j in range(len(power_reqs)):\n rover_spec = rover_specs[j]\n receiver_area = receiver_areas[j]\n power_req = power_reqs[j]\n\n # Get the beam radius\n beam_radius = R * np.sqrt(1.0 + (Z * wavelength / (np.pi * R ** 2)) ** 2)\n receiver_radius = np.sqrt(receiver_area / np.pi)\n radius_constraint_one = pointing_error[j] * Z + receiver_radius\n radius_constraint_two = pointing_error[j] * Z + beam_radius\n mask_one = beam_radius < radius_constraint_one\n mask_two = receiver_radius > radius_constraint_two\n final_mask = np.logical_and(mask_one, np.logical_not(mask_two))\n beam_radius[final_mask] = np.nan\n\n # Calculate the resulting surface flux\n receiver_power = laser_power/ (np.pi * beam_radius ** 2) * receiver_area\n receiver_power[np.pi * beam_radius ** 2 < receiver_area] = laser_power\n receiver_power[receiver_power < power_req] = np.nan\n\n # Normalise result by input power to get total efficiency\n receiver_power /= laser_power\n receiver_power[receiver_power < 0.001] = np.nan\n\n log_power = np.log10(receiver_power * 100)\n ax[j, i].contourf(np.log10(R), Z / 1e3, log_power, 100)\n m = cm.ScalarMappable()\n m.set_array(log_power)\n m.set_clim(-1.0, 2.0)\n fig.colorbar(m, ax=ax[j, i])\n ax[j, 0].set_ylabel('{} \\n Transmission distance [km]'.format(rover_spec))\n ax[0, i].set_title('Laser Power: {}kW'.format(laser_power / 1e3))\n ax[1, i].set_xlabel('Logarithm of Transmitter Radius [m]')\n plt.tight_layout()\n plt.show()\n\n return beam_radius, receiver_power",
"def scaleLandsat(self,img):\n\t\tthermal = img.select(ee.List(['thermal'])).multiply(0.1)\n\t\tscaled = ee.Image(img).select(self.env.divideBands).multiply(ee.Number(0.0001))\n\t\t\n\t\treturn img.select([]).addBands(scaled).addBands(thermal)",
"def scale_volume(self, random_cell):\n\n # compute the volume to scale to\n composition = random_cell.composition\n total_volume = 0\n for specie in composition:\n total_volume += composition[specie]*self.vpas[specie.symbol]\n\n # scale the volume\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n random_cell.scale_lattice(total_volume)\n if str(random_cell.lattice.a) == 'nan' or \\\n random_cell.lattice.a > 100:\n return False\n else:\n return True",
"def ensquared_one_pix(array, pix_scale, new_scale=40, plot=True):\n\n n = int(new_scale // pix_scale)\n minPix, maxPix = (pix + 1 - n) // 2, (pix + 1 + n) // 2\n ens = array[minPix:maxPix, minPix:maxPix]\n # print(ens.shape)\n energy = np.sum(ens)\n\n if plot:\n mapp = 'viridis'\n f, (ax1, ax2) = plt.subplots(1, 2)\n ax1 = plt.subplot(1, 2, 1)\n square = Rectangle((minPix-0.5, minPix-0.5), n, n, linestyle='--', fill=None, color='white')\n ax1.add_patch(square)\n img1 = ax1.imshow(array, cmap=mapp)\n ax1.set_title('%.1f mas pixels' % (pix_scale))\n img1.set_clim(0, 1)\n plt.colorbar(img1, ax=ax1, orientation='horizontal')\n\n ax2 = plt.subplot(1, 2, 2)\n img2 = ax2.imshow(ens, cmap=mapp)\n ax2.set_title('%d mas window' %new_scale)\n img1.set_clim(0, 1)\n plt.colorbar(img2, ax=ax2, orientation='horizontal')\n\n return energy",
"def fun(self, x_s):\n i, q, u, v = [x_s[..., k] for k in range(4)]\n if np.min(i) < -np.spacing(np.max(i)):\n # negative intensity (trace of 2x2 block), obviously not PSD\n return np.inf\n else:\n i_pol = np.sqrt(q ** 2 + u ** 2 + v ** 2)\n i_diff = i - i_pol\n if np.min(i_diff) < -np.spacing(np.max(i_diff)):\n # polarized intensity higher than total (det of 2x2 block < 0)\n return np.inf\n else:\n return 0",
"def rscale(mag=10.0):\n if mag > 11.5:\n return 0.5\n elif mag > 11.0:\n return 1.0\n elif mag > 10.5:\n return 1.5\n elif mag > 10.0:\n return 1.5\n elif mag > 9.5:\n return 2.0\n elif mag > 9.0:\n return 2.5\n elif mag > 8.5:\n return 3.0\n else:\n return 3.5",
"def substract_given_gaussian(wavelength, spectrum, centre, peak=0, sigma=0, flux=0, search_peak=False, allow_absorptions = False,\n lowlow= 20, lowhigh=10, highlow=10, highhigh = 20, \n lmin=0, lmax=0, fmin=0, fmax=0, plot=True, fcal=False, verbose = True, warnings=True): \n do_it = False\n # Check that we have the numbers!\n if peak != 0 and sigma != 0 : do_it = True\n\n if peak == 0 and flux != 0 and sigma != 0:\n #flux = peak * sigma * np.sqrt(2*np.pi)\n peak = flux / (sigma * np.sqrt(2*np.pi))\n do_it = True \n\n if sigma == 0 and flux != 0 and peak != 0 :\n #flux = peak * sigma * np.sqrt(2*np.pi)\n sigma = flux / (peak * np.sqrt(2*np.pi)) \n do_it = True \n \n if flux == 0 and sigma != 0 and peak != 0 :\n flux = peak * sigma * np.sqrt(2*np.pi)\n do_it = True\n\n if sigma != 0 and search_peak == True: do_it = True \n\n if do_it == False:\n print(\"> Error! We need data to proceed! Give at least two of [peak, sigma, flux], or sigma and force peak to f[centre]\")\n s_s = spectrum\n else:\n # Setup wavelength limits\n if lmin == 0 :\n lmin = centre-65. # By default, +-65 A with respect to line\n if lmax == 0 :\n lmax = centre+65.\n \n # Extract subrange to fit\n w_spec = []\n f_spec = []\n w_spec.extend((wavelength[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n f_spec.extend((spectrum[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n \n # Setup min and max flux values in subrange to fit\n if fmin == 0 :\n fmin = np.nanmin(f_spec) \n if fmax == 0 :\n fmax = np.nanmax(f_spec) \n \n # Define continuum regions: [-lowlow, -lowhigh] and [highlow,highhigh] in Angstroms with respect to centre\n w_cont=[]\n f_cont=[]\n w_cont.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > centre-lowlow and w_spec[i] < centre-lowhigh) or (w_spec[i] > centre+highlow and w_spec[i] < centre+highhigh) ) \n f_cont.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > centre-lowlow and w_spec[i] < centre-lowhigh) or (w_spec[i] > centre+highlow and w_spec[i] < centre+highhigh) ) \n \n # Linear Fit to continuum \n try: \n mm,bb = np.polyfit(w_cont, f_cont, 1)\n except Exception:\n bb = np.nanmedian(spectrum)\n mm = 0.\n if verbose or warnings: \n print(\" WARNING! Impossible to get the continuum!\")\n print(\" Scaling the continuum to the median value\") \n continuum = mm*np.array(w_spec)+bb \n # c_cont = mm*np.array(w_cont)+bb \n # rms continuum\n # rms_cont = np.nansum([ np.abs(f_cont[i] - c_cont[i]) for i in range(len(w_cont)) ]) / len(c_cont)\n\n if search_peak:\n # Search for index here w_spec(index) closest to line\n try:\n min_w = np.abs(np.array(w_spec)-centre)\n mini = np.nanmin(min_w)\n peak = f_spec[min_w.tolist().index(mini)] - continuum[min_w.tolist().index(mini)]\n flux = peak * sigma * np.sqrt(2*np.pi) \n if verbose: print(\" Using peak as f[\",np.round(centre,2),\"] = \",np.round(peak,2),\" and sigma = \", np.round(sigma,2), \" flux = \",np.round(flux,2))\n except Exception:\n if verbose or warnings: print(\" Error trying to get the peak as requested wavelength is \",np.round(centre,2),\"! Ignoring this fit!\")\n peak = 0.\n flux = -0.0001\n \n no_substract = False\n if flux < 0:\n if allow_absorptions == False:\n if np.isnan(centre) == False:\n if verbose or warnings : print(\" WARNING! This is an ABSORPTION Gaussian! 
As requested, this Gaussian is NOT substracted!\")\n no_substract = True\n if no_substract == False: \n if verbose: print(\" Substracting Gaussian at {:7.1f} with peak ={:10.4f} sigma ={:6.2f} and flux ={:9.4f}\".format(centre, peak,sigma,flux))\n \n gaussian_fit = gauss(w_spec, centre, peak, sigma)\n \n \n index=0\n s_s=np.zeros_like(spectrum)\n for wave in range(len(wavelength)):\n s_s[wave]=spectrum[wave]\n if wavelength[wave] == w_spec[0] : \n s_s[wave] = f_spec[0]-gaussian_fit[0]\n index=1\n if wavelength[wave] > w_spec[0] and wavelength[wave] <= w_spec[-1]:\n s_s[wave] = f_spec[index]-gaussian_fit[index]\n index=index+1\n if plot: \n plt.figure(figsize=(10, 4))\n plt.plot(np.array(w_spec),np.array(f_spec), \"b\", lw=3, alpha = 0.5)\n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\")\n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n \n # Vertical line at line\n plt.axvline(x=centre, color='k', linestyle='-', alpha=0.8)\n # Horizontal line at y = 0\n plt.axhline(y=0, color='k', linestyle=':', alpha=0.5) \n # Dashed green regions for continuum, defined by [lowlow, lowhigh] and [highlow,highhigh]\n plt.axvspan(centre+highlow, centre+highhigh, facecolor='g', alpha=0.15,zorder=3)\n plt.axvspan(centre-lowlow, centre-lowhigh, facecolor='g', alpha=0.15,zorder=3)\n # Plot linear fit for continuum\n plt.plot(w_spec, continuum,\"g--\")\n # Plot Gaussian fit \n plt.plot(w_spec, gaussian_fit+continuum, 'r-', alpha=0.8) \n # Vertical lines to emission line\n #plt.axvline(x= low_limit, color='k', linestyle=':', alpha=0.5)\n #plt.axvline(x= high_limit, color='k', linestyle=':', alpha=0.5) \n # Plot residuals\n #plt.plot(w_spec, residuals, 'k')\n #plt.title('Fit: x0=%.2f y0=%.2e sigma=%.2f flux=%.2e rms=%.3e' % (fit[0], fit[1], fit[2], gaussian_flux, rms_fit))\n plt.show() \n plt.close()\n \n plt.figure(figsize=(10, 4))\n plt.plot(wavelength,spectrum, \"r\")\n plt.plot(wavelength,s_s, \"c\")\n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\")\n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n plt.show()\n plt.close()\n else:\n s_s = spectrum\n return s_s",
"def eeg_rms(array, axis=0):\t\t\n\treturn np.sqrt(np.mean(array ** 2,axis))",
"def Hosek18(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def evaluate_peak_norm(x, y, amplitude, x_0, y_0, r_in, width):\n rr = (x - x_0) ** 2 + (y - y_0) ** 2\n rr_in = r_in ** 2\n rr_out = (r_in + width) ** 2\n\n # Because np.select evaluates on the whole rr array\n # we have to catch the invalid value warnings\n # Note: for r > r_out 'np.select' fills automatically zeros!\n with np.errstate(invalid='ignore'):\n values = np.select([rr <= rr_in, rr <= rr_out],\n [np.sqrt(rr_out - rr) - np.sqrt(rr_in - rr),\n np.sqrt(rr_out - rr)])\n return amplitude * values / np.sqrt(rr_out - rr_in)",
"def scaling():\n \n for i in range(cfg.nfea):\n dm = 0\n var = 0\n for j in range(cfg.ntrain):\n dm += cfg.a[j,i]\n dm = dm/cfg.ntrain\n \n for j in range(cfg.ntrain):\n var += (cfg.a[j,i]-dm)**2\n\n var = var/cfg.ntrain\n var = np.sqrt(var)\n \n if var >= 10**(-5):\n cfg.clin[i] = 1.0/var \n cfg.dlin[i] = -dm/var \n \n else: \n if np.abs(dm)<=1.0:\n cfg.clin[i] = 1.0\n cfg.dlin[i] = 0.0 \n else: \n cfg.clin[i] = 1.0/dm\n cfg.dlin[i] = 0.0 \n \n for j in range(cfg.ntrain):\n cfg.a_scaled[j,i] = cfg.clin[i]*cfg.a[j,i] + cfg.dlin[i]\n \n return",
"def sr(self, params, r_s, sz_s, energy_s):\n\n self.itr += 1\n\n r_av = jnp.reshape(r_s[:,:,0:self.nav,:,:], (self.n_devices, self.nwalk // self.n_devices * self.nav, self.npart, self.ndim))\n sz_av = jnp.reshape(sz_s[:,:,0:self.nav,:,:], (self.n_devices, self.nwalk // self.n_devices * self.nav, self.npart, 2))\n energy_av = jnp.reshape(energy_s[:,:,0:self.nav], (self.n_devices, self.nwalk // self.n_devices * self.nav))\n\n r_val = jnp.reshape(r_s[:,:,0:self.nac,:,:], (self.n_devices, self.nwalk // self.n_devices * self.nac, self.npart, self.ndim))\n sz_val = jnp.reshape(sz_s[:,:,0:self.nac,:,:], (self.n_devices, self.nwalk // self.n_devices * self.nac, self.npart, 2))\n energy_val = jnp.reshape(energy_s[:,:,0:self.nac], (self.n_devices, self.nwalk // self.n_devices * self.nac))\n r_avg, r_max = self.observables.radius_pmap(r_val, sz_val)\n logger.info(f\"Maximum Radius validation=, {jnp.sqrt(r_max):.3f} \")\n logger.info(f\"Average Radius validation=, {jnp.sqrt(r_avg):.3f} \")\n psi_val = self.wavefunction.psi_pmap(params, r_val, sz_val)\n logger.info(f\"Maximum Psi validation= ,{jnp.max(jnp.abs(psi_val)):.4e} \")\n logger.info(f\"Average Psi validation= ,{jnp.mean(jnp.abs(psi_val)):.4e} \")\n logger.info(f\"Minimum Psi validation= ,{jnp.min(jnp.abs(psi_val)):.4e} \")\n\n r_tst = jnp.reshape(r_s[:,:,self.nav:self.nav+self.nac,:,:], (self.n_devices, self.nwalk // self.n_devices * self.nac, self.npart, self.ndim))\n sz_tst = jnp.reshape(sz_s[:,:,self.nav:self.nav+self.nac,:,:], (self.n_devices, self.nwalk // self.n_devices * self.nac, self.npart, 2))\n energy_tst = jnp.reshape(energy_s[:,:,self.nav:self.nav+self.nac], (self.n_devices, self.nwalk // self.n_devices * self.nac))\n r_avg, r_max = self.observables.radius_pmap(r_tst, sz_tst)\n logger.info(f\"Maximum Radius test=, {jnp.sqrt(r_max):.3f} \")\n logger.info(f\"Average Radius test=, {jnp.sqrt(r_avg):.3f} \")\n psi_tst = self.wavefunction.psi_pmap(params, r_tst, sz_tst)\n logger.info(f\"Maximum Psi test= ,{jnp.max(jnp.abs(psi_tst)):.4e} \")\n logger.info(f\"Average Psi test= ,{jnp.mean(jnp.abs(psi_tst)):.4e} \")\n logger.info(f\"Minimum Psi test= ,{jnp.min(jnp.abs(psi_tst)):.4e} \")\n\n if (self.solver == 'Adam'):\n dp_i, self.g2_i, self.m_i = self.adam(params, r_av, sz_av, energy_av, g2_i = self.g2_i, m_i = self.m_i, itr = self.itr)\n elif (self.solver == 'Cholesky'):\n dp_i, self.g2_i = self.sr_cholesky(params, r_av, sz_av, energy_av, g2_i = self.g2_i, itr = self.itr) \n elif (self.solver == 'Pseudo'):\n dp_i = self.sr_pseudo(params, r_av, sz_av, energy_av) \n elif (self.solver == 'CG'):\n dp_i, self.g2_i = self.sr_cg(params, r_av, sz_av, energy_av, dp0_i=self.dp_i, g2_i = self.g2_i, itr = self.itr) \n\n self.dp_i = dp_i\n energy_d_min= 1.\n converged = False\n dp_range = jnp.linspace(0.1, 0.8, self.nstep)\n for n in range (self.nstep):\n# dt = dt_range[n]\n dp_n = self.delta * dp_i#[n] #+ dp_range[n] * self.dp_o\n dp_max = jnp.max(jnp.abs(dp_n)) \n delta_p = self.wavefunction.unflatten_params(dp_n) \n# delta_p = self.wavefunction.unflatten_params( self.delta * dp_i ) \n\n psi_d_tst, psi_d_err_tst, energy_d_tst, energy_d_err_tst, overlap_tst = self.pmap_dist(delta_p, params, r_tst, sz_tst, psi_tst, energy_tst)\n dist_tst = jnp.arccos(jnp.sqrt(psi_d_tst))**2\n logger.debug(f\"dist acos tst = {dist_tst:.8f}\")\n logger.debug(f\"energy diff tst = {energy_d_tst:.6f}, err = {energy_d_err_tst:.6f}\")\n logger.debug(f\"overlap tst= { jnp.arccos(jnp.sqrt(overlap_tst))**2:.8f}\")\n \n psi_d_val, psi_d_err_val, energy_d_val, energy_d_err_val, 
overlap_val = self.pmap_dist(delta_p, params, r_val, sz_val, psi_val, energy_val)\n dist_val = jnp.arccos(jnp.sqrt(psi_d_val))**2\n logger.debug(f\"dist acos val = {dist_val:.8f}\")\n logger.debug(f\"energy diff val= {energy_d_val:.6f}, err = {energy_d_err_val:.6f}\")\n logger.debug(f\"overlap val = { jnp.arccos(jnp.sqrt(overlap_val))**2:.8f}\")\n\n logger.debug(f\"delta param max = {dp_max:.6f}\")\n logger.debug(f\"delta param avg = {jnp.linalg.norm(dp_n):.6f}\")\n\n if ( dist_tst < 0.1 and dist_val < 0.1 and energy_d_val < energy_d_min and dp_max < 0.5):\n energy_d_min = energy_d_val\n energy_d_err_min = energy_d_err_val\n delta_p_min = delta_p\n dp_n_min = dp_n\n converged = True\n if converged: \n logger.debug(f\"Converged, energy diff min = {energy_d_min:.6f}, err = {energy_d_err_min:.6f}\")\n else:\n logger.debug(f\"Not converged\")\n delta_p_min = self.wavefunction.unflatten_params(jnp.zeros(self.nparams))\n dp_n_min = jnp.zeros(self.nparams)\n return delta_p_min",
"def rms(a):\n\treturn np.sqrt(np.sum(np.power(a,2))/len(a))",
"def freq_optimization(self):\n index = identify_scale(self.vz, True)\n # In case the patient is limping\n if index > 35:\n index = index / 2\n print(f\"Scale used is {index}\")",
"def check_if_scaled(a):\r\n if a.split(\" \")[-1] == 's':\r\n return True\r\n else:\r\n return False",
"def analysis_function_ensquared(system, wavelength_idx, surface, config, px, py, box_size):\n\n det_pix = 15e-3 # Size of the detector pixel [mm]\n\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # First of all, we need to find the Surface Number for the IMAGE SLICER \"Image Plane\"\n N_surfaces = system.LDE.NumberOfSurfaces\n surface_names = {} # A dictionary of surface number -> surface comment\n for k in np.arange(1, N_surfaces):\n surface_names[k] = system.LDE.GetSurfaceAt(k).Comment\n # find the Slicer surface number\n try:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('Image Plane')]\n except ValueError:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('IFU SRM FP')]\n slicer_surface = slicer_num\n # slicer = system.LDE.GetSurfaceAt(slicer_num)\n\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n N_fields = sysField.NumberOfFields\n N_waves = len(wavelength_idx)\n\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(N_fields)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(N_fields)])\n\n # Use the Field Point at the centre of the Slice\n fx, fy = sysField.GetField(2).X, sysField.GetField(2).Y\n hx, hy = fx / X_MAX, fy / Y_MAX # Normalized field coordinates (hx, hy)\n obj_xy = np.array([fx, fy])\n\n N_pupil = px.shape[0] # Number of rays in the Pupil for a given field point and wavelength\n N_rays = N_waves * N_pupil\n\n EE = np.empty(N_waves)\n sli_foc_xy = np.empty((N_waves, 2))\n det_foc_xy = np.empty((N_waves, 2))\n\n slicer_xy = np.empty((N_waves, N_pupil, 2))\n slicer_xy[:] = np.nan\n detector_xy = np.empty((N_waves, N_pupil, 2))\n detector_xy[:] = np.nan\n\n # (1) Run the raytrace up to the IMAGE SLICER\n raytrace = system.Tools.OpenBatchRayTrace()\n # remember to specify the surface to which you are tracing!\n rays_slicer = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, slicer_surface)\n\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n for (p_x, p_y) in zip(px, py): # Add the ray to the RayTrace\n rays_slicer.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n rays_slicer.StartReadingResults()\n checksum_slicer = 0\n for k in range(N_rays): # Get Raytrace results at the Image Slicer\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n # print(i_wave, j_pupil)\n output = rays_slicer.ReadNextResult()\n if output[2] == 0 and output[3] == 0:\n slicer_xy[i_wave, j_pupil, 0] = output[4]\n slicer_xy[i_wave, j_pupil, 1] = output[5]\n checksum_slicer += 1\n # this might have to change. 
We assume no vignetting should occur before the slicer\n # but for the MC this might happen\n if output[2] == 0 and output[3] != 0:\n vignetting_code = output[3]\n vignetting_surface = system.LDE.GetSurfaceAt(vignetting_code).Comment\n print(\"\\nConfig #%d\" % config)\n print(\"Vignetting at surface #%d: %s\" % (vignetting_code, vignetting_surface))\n\n if checksum_slicer < N_rays:\n raise ValueError('Some rays were lost before the Image Slicer')\n\n rays_slicer.ClearData()\n\n # Count how many rays fall inside a +- 1 mm window in Y, wrt the centroid\n slicer_cent_x = np.nanmean(slicer_xy[:, :, 0], axis=1)\n slicer_cent_y = np.nanmean(slicer_xy[:, :, 1], axis=1)\n sli_foc_xy[:, 0] = slicer_cent_x\n sli_foc_xy[:, 1] = slicer_cent_y\n\n # print(slicer_cent_y)\n below_slicer = slicer_xy[:, :, 1] < slicer_cent_y[:, np.newaxis] + 1.0 * box_size / 2\n above_slicer = slicer_xy[:, :, 1] > slicer_cent_y[:, np.newaxis] - 1.0 * box_size / 2\n inside_slicer = (np.logical_and(below_slicer, above_slicer))\n # print(inside_slicer[0, :10])\n\n # Now, for each wavelength, we calculate which rays fulfil the Image Slicer conditions\n index_valid_slicer = [np.argwhere(inside_slicer[i, :] == True)[:, 0] for i in range(N_waves)]\n # print(index_valid_slicer[1][:10])\n # print(index_valid_slicer[2][:10])\n\n # (2) Run the raytrace up to the DETECTOR\n # For speed, we re-use the same Raytrace, just define new rays!\n # raytrace_det = system.Tools.OpenBatchRayTrace()\n # Detector is always the last surface\n detector_surface = system.LDE.NumberOfSurfaces - 1\n rays_detector = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, detector_surface)\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n for (p_x, p_y) in zip(px, py):\n rays_detector.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n\n rays_detector.StartReadingResults()\n checksum_detector = 0\n # index_valid_detector = [] # Valid means they make it to the detector even if vignetted at the Slicer\n vignetted = []\n index_vignetted = []\n index_valid_detector = np.empty((N_waves, N_pupil))\n index_valid_detector[:] = np.nan\n for k in range(N_rays): # Get Raytrace results at the Detector\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n output = rays_detector.ReadNextResult()\n if output[2] == 0 and output[3] == 0: # ErrorCode & VignetteCode\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n\n elif output[2] == 0 and output[3] != 0:\n # Some rays are vignetted\n vignetted.append([output[4], output[5]])\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n index_vignetted.append(k)\n\n # index_valid_detector = np.array(index_valid_detector)\n # # print(index_valid_detector.shape)\n # # print(index_valid_detector)\n # index_valid_detector = index_valid_detector.reshape((N_waves, N_pupil))\n # # print(index_valid_detector.shape)\n\n rays_detector.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n\n # (3) Calculate the ENSQUARED ENERGY\n # We only count the rays that where inside the slicer to begin with and the ones that make it to the detector\n for i_wave in range(N_waves):\n valid_both = []\n for k in range(N_pupil):\n # print(index_valid_detector[i_wave])\n if k in index_valid_slicer[i_wave] and k in 
index_valid_detector[i_wave]:\n valid_both.append(k)\n\n valid_det_x = detector_xy[i_wave, :, 0][valid_both]\n valid_det_y = detector_xy[i_wave, :, 1][valid_both]\n\n # Now, out of the VALID rays, we calculate which detector rays fall inside a 2x pixel box along X\n dcx = np.mean(valid_det_x) # Detector Centroid X\n dcy = np.mean(valid_det_y)\n det_foc_xy[i_wave] = [dcx, dcy]\n\n left_detector = valid_det_x < dcx + det_pix * box_size / 2\n right_detector = valid_det_x > dcx - det_pix * box_size / 2\n inside_detector = (np.logical_and(left_detector, right_detector))\n total_detector = np.sum(inside_detector)\n ensq = total_detector / N_pupil\n # print(ensq)\n EE[i_wave] = ensq * 0.98\n\n # SHOW THIS in the methodology\n\n # fig, axes = plt.subplots(2, N_waves)\n # colors = cm.Reds(np.linspace(0.5, 1, N_waves))\n # for j in range(N_waves):\n # ax1 = axes[0][j]\n # scy = sli_foc_xy[j, 1]\n # scx = sli_foc_xy[j, 0]\n # ax1.axhline(y=scy + 1.0 * box_size / 2, color='black', linestyle='--')\n # ax1.axhline(y=scy - 1.0 * box_size / 2, color='black', linestyle='--')\n # ax1.scatter(slicer_xy[j, :, 0], slicer_xy[j, :, 1], s=3, color=colors[j])\n # ax1.scatter(sli_foc_xy[j, 0], sli_foc_xy[j, 1], s=3, color='black')\n # wavelength = system.SystemData.Wavelengths.GetWavelength(wavelength_idx[j]).Wavelength\n # ax1.set_title(\"IMG SLI | %.3f $\\mu$m\" % wavelength)\n # ax1.set_aspect('equal')\n # ax1.get_yaxis().set_visible(False)\n # ax1.get_xaxis().set_visible(False)\n #\n # p = 1.2\n # ax1.set_xlim([scx - p * box_size / 2, scx + p * box_size / 2])\n # ax1.set_ylim([scy - p * box_size / 2, scy + p * box_size / 2])\n #\n # ax2 = axes[1][j]\n # dcx = det_foc_xy[j, 0]\n # dcy = det_foc_xy[j, 1]\n # ax2.scatter(detector_xy[j, :, 0], detector_xy[j, :, 1], s=3, color=colors[j])\n # ax2.scatter(det_foc_xy[j, 0], det_foc_xy[j, 1], s=3, color='black')\n # ax2.axvline(x=dcx + det_pix * box_size / 2, color='black', linestyle='--')\n # ax2.axvline(x=dcx - det_pix * box_size / 2, color='black', linestyle='--')\n # ax2.set_title(\"DET | %.3f $\\mu$m\" % wavelength)\n # ax2.set_aspect('equal')\n # ax2.get_yaxis().set_visible(False)\n # ax2.get_xaxis().set_visible(False)\n # ax2.set_xlim([dcx - p * det_pix * box_size / 2, dcx + p * det_pix * box_size / 2])\n # ax2.set_ylim([dcy - p * det_pix * box_size / 2, dcy + p * det_pix * box_size / 2])\n #\n #\n # plt.show()\n\n return EE, obj_xy, sli_foc_xy, det_foc_xy",
"def test_jam_axi_rms():\n np.random.seed(123)\n xbin, ybin = np.random.uniform(low=[-55, -40], high=[55, 40], size=[1000, 2]).T\n\n inc = 60. # Assumed galaxy inclination\n r = np.sqrt(xbin**2 + (ybin/np.cos(np.radians(inc)))**2) # Radius in the plane of the disk\n a = 40 # Scale length in arcsec\n vr = 2000*np.sqrt(r)/(r+a) # Assumed velocity profile\n vel = vr * np.sin(np.radians(inc))*xbin/r # Projected velocity field\n sig = 8700/(r+a) # Assumed velocity dispersion profile\n rms = np.sqrt(vel**2 + sig**2) # Vrms field in km/s\n\n surf = np.array([39483., 37158., 30646., 17759., 5955.1, 1203.5, 174.36, 21.105, 2.3599, 0.25493])\n sigma = np.array([0.153, 0.515, 1.58, 4.22, 10, 22.4, 48.8, 105, 227, 525])\n qObs = np.full_like(sigma, 0.57)\n\n distance = 16.5 # Assume Virgo distance in Mpc (Mei et al. 2007)\n mbh = 1e8 # Black hole mass in solar masses\n beta = np.full_like(surf, 0.3)\n\n surf_lum = surf # Assume self-consistency\n sigma_lum = sigma\n qobs_lum = qObs\n surf_pot = surf\n sigma_pot = sigma\n qobs_pot = qObs\n\n sigmapsf = 0.6\n pixsize = 0.8\n goodbins = r > 10 # Arbitrarily exclude the center to illustrate how to use goodbins\n\n # The model is similar but not identical to the adopted kinematics!\n rmsModel, ml, chi2, flux = jam_axi_rms(\n surf_lum, sigma_lum, qobs_lum, surf_pot, sigma_pot, qobs_pot,\n inc, mbh, distance, xbin, ybin, plot=True, rms=rms, sigmapsf=sigmapsf,\n beta=beta, pixsize=pixsize, tensor='zz', goodbins=goodbins)\n plt.pause(0.01)",
"def polar_scale_mean_filtration(scale_data):\n mean_data = np.mean(scale_data)\n for col in range(POLAR_IMAGE_WIDTH):\n if scale_data[col] < mean_data:\n scale_data[col] = 0",
"def FindScale(self):\n\n ## 6 and from the cv code the distance is 6 then we are good\n print(\"TODO: Very hard\")",
"def calc_scaled_waveform(\n waveform: np.ndarray,\n lin_factor: np.ndarray,\n pow2_factor: np.ndarray,\n logger=empty_logger()\n) -> np.ndarray:\n\n logger.info(\"scaling waveform\")\n\n return (\n 10e-9\n * (2 ** pow2_factor)\n * lin_factor\n * waveform.swapaxes(0, 1)\n ).swapaxes(0, 1)",
"def RiekeLebofsky85(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def rv_precision(\n wavelength: Union[Quantity, ndarray],\n flux: Union[Quantity, ndarray],\n mask: Optional[ndarray] = None,\n **kwargs,\n) -> Quantity:\n return c / sqrt_sum_wis(wavelength, flux, mask=mask, **kwargs)",
"def set_intrinsic_error(self, err_scale):\n if err_scale is None:\n self.switch_off_key(\"ERR_SCALE\")\n return\n elif len(err_scale) != len(self.get_filters()):\n raise ValueError(\"You must provide the number of intrinsic magnitudes as you have filters.\\n\"+\n \"(err_scale: {} vs. filters: {})\".format(err_scale, self.get_filters()))\n \n self.switch_on_key(\"ERR_SCALE\") # ignored if already on\n self.set_value(\"ERR_SCALE\",\",\".join([\"{}\".format(l) for l in err_scale]))",
"def sphere_sre(solution):\n a = 0\n bias = 0.2\n x = solution.get_x()\n x1 = x[:10]\n x2 = x[10:]\n value1 = sum([(i-bias)*(i-bias) for i in x1])\n value2 = 1/len(x) * sum([(i-bias)*(i-bias) for i in x2])\n return value1 + value2",
"def _process(self, data: np.array) -> np.array:\n # pylint: disable=no-member\n return unp.sqrt(data[..., 0] ** 2 + data[..., 1] ** 2) * self.scale",
"def simple_scaling(input_data):\n\n # Insert debugging assertions\n assert type(input_data) is np.ndarray, \"The 'input_data' must be numpy array.\"\n\n # Get the minimum values of the input numpy array along the axis \n Max = np.max(input_data, axis = 0)\n\n # Simple sclaing \n scaled_input_data = input_data / (Max + sys.float_info.min)\n\n # Return scaled input data\n return scaled_input_data",
"def test_euclidean_scale(self):\n\n s = space(curvature=0)\n\n magic = 77773.333773777773733\n for mul in (2, 5, 1/3, 1/11, magic, 1/magic):\n for name, dim in (\n ('sphere_s1', 1),\n ('sphere_v2', 2),\n ('sphere_s2', 2),\n ('sphere_v3', 3)\n ):\n self.assertTrue(isclose(\n getattr(s, name)(1) * mul**dim,\n getattr(s, name)(mul)\n ))",
"def check_wavelengths(self, wl):\n\n return (len(wl) == self.fm.instrument.wl) and \\\n all((wl-self.fm.instrument.wl) < 1e-2)",
"def scalesShearsAndSpectra(shape, numOfScales=None,\n realCoefficients=True, maxScale='max',\n shearletSpect=meyerShearletSpect,\n shearletArg=kutyniokaux, realReal=True,\n fftshift_spectra=True):\n if len(shape) != 2:\n raise ValueError(\"2D image dimensions required\")\n\n if numOfScales is None:\n numOfScales = _defaultNumberOfScales(shape)\n\n # rectangular images\n if shape[1] != shape[0]:\n rectangular = True\n else:\n rectangular = False\n\n # for better symmetry each dimensions of the array should be odd\n shape = np.asarray(shape)\n shape_orig = shape.copy()\n shapem = np.mod(shape, 2) == 0 # True for even sized axes\n both_even = np.all(np.equal(shapem, False))\n both_odd = np.all(np.equal(shapem, True))\n shape[shapem] += 1\n\n if not realCoefficients:\n warnings.warn(\"Complex shearlet case may be buggy. Doesn't \"\n \"currently give perfect reconstruction.\")\n\n if not (both_even or both_odd):\n # for some reason reconstruction is not exact in this case, so don't\n # allow it for now.\n raise ValueError(\"Mixture of odd and even axis sizes is currently \"\n \"unsupported.\")\n\n # create meshgrid\n # largest value where psi_1 is equal to 1\n maxScale = maxScale.lower()\n if maxScale == 'max':\n X = 2**(2 * (numOfScales - 1) + 1)\n elif maxScale == 'min':\n X = 2**(2 * (numOfScales - 1))\n else:\n raise ValueError('Wrong option for maxScale, must be \"max\" or \"min\"')\n\n xi_x_init = np.linspace(0, X, (shape[1] + 1) // 2)\n xi_x_init = np.concatenate((-xi_x_init[-1:0:-1], xi_x_init), axis=0)\n if rectangular:\n xi_y_init = np.linspace(0, X, (shape[0] + 1) // 2)\n xi_y_init = np.concatenate((-xi_y_init[-1:0:-1], xi_y_init), axis=0)\n else:\n xi_y_init = xi_x_init\n\n # create grid, from left to right, bottom to top\n [xi_x, xi_y] = np.meshgrid(xi_x_init, xi_y_init[::-1], indexing='xy')\n\n # cones\n C_hor = np.abs(xi_x) >= np.abs(xi_y) # with diag\n C_ver = np.abs(xi_x) < np.abs(xi_y)\n\n # number of shears: |-2^j,...,0,...,2^j| = 2 * 2^j + 1\n # now: inner shears for both cones:\n # |-(2^j-1),...,0,...,2^j-1|\n # = 2 * (2^j - 1) + 1\n # = 2^(j+1) - 2 + 1 = 2^(j+1) - 1\n # outer scales: 2 (\"one\" for each cone)\n # shears for each scale: hor: 2^(j+1) - 1, ver: 2^(j+1) - 1, diag: 2\n # -> hor + ver + diag = 2*(2^(j+1) - 1) +2 = 2^(j + 2)\n # + 1 for low-pass\n shearsPerScale = 2**(np.arange(numOfScales) + 2)\n numOfAllShears = 1 + shearsPerScale.sum()\n\n # init\n Psi = np.zeros(tuple(shape) + (numOfAllShears, ))\n # frequency domain:\n # k 2^j 0 -2^j\n #\n # 4 3 2 -2^j\n # \\ | /\n # (5)- x -1 0\n # / | \\\n # 2^j\n #\n # [0:-1:-2^j][-2^j:1:2^j][2^j:-1:1] (not 0)\n # hor ver hor\n #\n # start with shear -2^j (insert in index 2^j+1 (with transposed\n # added)) then continue with increasing scale. Save to index 2^j+1 +- k,\n # if + k save transposed. If shear 0 is reached save -k starting from\n # the end (thus modulo). 
For + k just continue.\n #\n # then in time domain:\n #\n # 2 1 8\n # \\ | /\n # 3- x -7\n # / | \\\n # 4 5 6\n #\n\n # lowpass\n Psi[:, :, 0] = shearletSpect(xi_x, xi_y, np.NaN, np.NaN, realCoefficients,\n shearletArg, scaling_only=True)\n\n # loop for each scale\n for j in range(numOfScales):\n # starting index\n idx = 2**j\n start_index = 1 + shearsPerScale[:j].sum()\n shift = 1\n for k in range(-2**j, 2**j + 1):\n # shearlet spectrum\n P_hor = shearletSpect(xi_x, xi_y, 2**(-2 * j), k * 2**(-j),\n realCoefficients, shearletArg)\n if rectangular:\n P_ver = shearletSpect(xi_y, xi_x, 2**(-2 * j), k * 2**(-j),\n realCoefficients, shearletArg)\n else:\n # the matrix is supposed to be mirrored at the counter\n # diagonal\n # P_ver = fliplr(flipud(P_hor'))\n P_ver = np.rot90(P_hor, 2).T # TODO: np.conj here too?\n if not realCoefficients:\n # workaround to cover left-upper part\n P_ver = np.rot90(P_ver, 2)\n\n if k == -2**j:\n Psi[:, :, start_index + idx] = P_hor * C_hor + P_ver * C_ver\n elif k == 2**j:\n Psi_idx = start_index + idx + shift\n Psi[:, :, Psi_idx] = P_hor * C_hor + P_ver * C_ver\n else:\n new_pos = np.mod(idx + 1 - shift, shearsPerScale[j]) - 1\n if(new_pos == -1):\n new_pos = shearsPerScale[j] - 1\n Psi[:, :, start_index + new_pos] = P_hor\n Psi[:, :, start_index + idx + shift] = P_ver\n\n # update shift\n shift += 1\n\n # generate output with size shape_orig\n Psi = Psi[:shape_orig[0], :shape_orig[1], :]\n\n # modify spectra at finest scales to obtain really real shearlets\n # the modification has only to be done for dimensions with even length\n if realCoefficients and realReal and (shapem[0] or shapem[1]):\n idx_finest_scale = (1 + np.sum(shearsPerScale[:-1]))\n scale_idx = idx_finest_scale + np.concatenate(\n (np.arange(1, (idx_finest_scale + 1) / 2 + 1),\n np.arange((idx_finest_scale + 1) / 2 + 2, shearsPerScale[-1])),\n axis=0)\n scale_idx = scale_idx.astype(np.int)\n if shapem[0]: # even number of rows -> modify first row:\n idx = slice(1, shape_orig[1])\n Psi[0, idx, scale_idx] = 1 / np.sqrt(2) * (\n Psi[0, idx, scale_idx] +\n Psi[0, shape_orig[1] - 1:0:-1, scale_idx])\n if shapem[1]: # even number of columns -> modify first column:\n idx = slice(1, shape_orig[0])\n Psi[idx, 0, scale_idx] = 1 / np.sqrt(2) * (\n Psi[idx, 0, scale_idx] +\n Psi[shape_orig[0] - 1:0:-1, 0, scale_idx])\n\n if fftshift_spectra:\n # Note: changed to ifftshift so roundtrip tests pass for odd sized\n # arrays\n Psi = np.fft.ifftshift(Psi, axes=(0, 1))\n\n # Add the following two lines to calculate the spectra of the Shearlet to comprass the size of the .npy file\n #Psi[..., 1] = np.sum(Psi[..., 1:], axis=-1)\n #Psi = Psi[..., :2]\n return Psi",
"def test_linewidth_spect(self, idx, a, fwhm):\n x = np.linspace(-1, 1, 100)\n sigma = fwhm / np.sqrt(8 * np.log(2))\n y = a * np.exp(-((x - x[idx]) ** 2) / (2 * sigma**2))\n\n lw_guess = guess.full_width_half_max(x, y, idx)\n\n self.assertAlmostEqual(fwhm, lw_guess, delta=0.1)",
"def airy_and_slicer(surface, wavelength, scale_mas, PSF_window, N_window):\n\n # Print message to know we are updating the cache\n print('Recalculating Airy Pattern for %.3f microns' % wavelength)\n\n # Plate scales [Px, Py] for each spaxel scale in mm / arcsec,\n # depending on the surface [IS: Image Slicer, DET: Detector]\n plate_scales = {'IS': {4.0: [125, 250], 60.0: [16.67, 16.67]},\n 'DET': {4.0: [3.75, 7.5], 60.0: [0.5, 0.5]}}\n plate_x = plate_scales[surface][scale_mas][0]\n plate_y = plate_scales[surface][scale_mas][1]\n\n # We know how many Microns the pixels of the Geometric PSF span [PSF_window / N_window]\n pix_sampling = PSF_window / N_window # micron at the detector plane\n # Using the plate scale we calculate how many m.a.s each of those pixels have to span\n pix_scale_x = pix_sampling / plate_x # milliarcsec / pixel\n pix_scale_y = pix_sampling / plate_y # milliarcsec / pixel\n\n # Calculate the relative size of the pupil aperture needed to ensure the PSF is\n # sampled with the given pix_scale at the focal plane\n ELT_DIAM = 39\n MILIARCSECS_IN_A_RAD = 206265000\n pix_rad_x = pix_scale_x / MILIARCSECS_IN_A_RAD # radians / pixel\n pix_rad_y = pix_scale_y / MILIARCSECS_IN_A_RAD\n RHO_APER_x = pix_rad_x * ELT_DIAM / (wavelength * 1e-6)\n RHO_APER_y = pix_rad_y * ELT_DIAM / (wavelength * 1e-6)\n RHO_OBSC_x = 0.30 * RHO_APER_x # ELT central obscuration\n RHO_OBSC_y = 0.30 * RHO_APER_y # ELT central obscuration\n\n # Sanity check\n PIX_RAD_x = RHO_APER_x * wavelength / ELT_DIAM * 1e-6\n PIX_RAD_y = RHO_APER_y * wavelength / ELT_DIAM * 1e-6\n PIX_MAS_x = PIX_RAD_x * MILIARCSECS_IN_A_RAD\n PIX_MAS_y = PIX_RAD_y * MILIARCSECS_IN_A_RAD\n\n # Define the ELT pupil mask. Note that we use a central obscuration too\n N = 2048\n x = np.linspace(-1, 1, N)\n xx, yy = np.meshgrid(x, x)\n\n # To get the anamorphic scaling we define the equation for an ellipse\n rho = np.sqrt((xx / RHO_APER_x) ** 2 + (yy / RHO_APER_y) ** 2)\n\n # (1) Propagate to the Image Slicer Focal plane\n elt_mask = (RHO_OBSC_x / RHO_APER_x < rho) & (rho < 1.0)\n pupil = elt_mask * np.exp(1j * elt_mask)\n image_electric = fftshift(fft2(pupil))\n\n if surface == 'IS':\n # print(\"IS\")\n # We are already at the Image Slicer, don't do anything else\n min_pix, max_pix = N // 2 - N_window // 2, N // 2 + N_window // 2\n final_psf = (np.abs(image_electric))**2\n final_psf /= np.max(final_psf)\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n elif surface == 'DET':\n # print(\"DET\")\n # (1.1) Add slicer effect by masking\n # We mask the PSF covering a band of size 1x SPAXEL, depending on the scale\n # If we have 4x4 mas, then we cover a band of 4 mas over the PSF\n x_min, x_max = -N/2 * PIX_MAS_x, N/2 * PIX_MAS_x\n y_min, y_max = -N/2 * PIX_MAS_y, N/2 * PIX_MAS_y\n x_slice = np.linspace(x_min, x_max, N, endpoint=True)\n y_slice = np.linspace(y_min, y_max, N, endpoint=True)\n x_grid, y_grid = np.meshgrid(x_slice, y_slice)\n slicer_mask = np.abs(y_grid) < scale_mas / 2\n\n # ## Show the PSF both in [mas] space where it should be circular and in [pixel] space where it should be anamorphic\n # fig, ax = plt.subplots(1, 1)\n # img1 = ax.imshow((np.abs(image_electric))**2, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # # plt.colorbar(img1, ax=ax)\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'X [mas]')\n # ax.set_ylabel(r'Y [mas]')\n # ax.set_xlim([-10, 10])\n # ax.set_ylim([-10, 10])\n #\n # fig, ax = plt.subplots(1, 1)\n # img1 = 
ax.imshow((np.abs(image_electric))**2, extent=[-N/2, N/2, -N/2, N/2], cmap='bwr')\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'Pixels [ ]')\n # ax.set_ylabel(r'Pixels [ ]')\n # ax.set_xlim([-100, 100])\n # ax.set_ylim([-100, 100])\n\n # plt.show()\n\n # (2) Propagate the masked electric field to Pupil Plane\n pup_grating = ifft2(fftshift(slicer_mask * image_electric))\n # (2.1) Add pupil mask, this time without the central obscuration\n aperture_mask = rho < 1.0\n\n # (3) Propagate back to Focal Plane\n final_focal = fftshift(fft2(aperture_mask * pup_grating))\n final_psf = (np.abs(final_focal))**2\n final_psf /= np.max(final_psf)\n\n # (4) Crop the PSF to fit to the necessary window to ease the convolutions\n min_pix, max_pix = N//2 - N_window//2, N//2 + N_window//2\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n # If we want to show the plots for Documentation\n\n # fig, (ax1, ax2, ax3) = plt.subplots(1, 3)\n # psf_airy = (np.abs(image_electric))**2\n # img1 = ax1.imshow(psf_airy, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax1.axhline(y=scale_mas/2, linestyle='--', color='black')\n # ax1.axhline(y=-scale_mas/2, linestyle='--', color='black')\n # ax1.set_xlabel(r'X [mas]')\n # ax1.set_ylabel(r'Y [mas]')\n # ax1.set_xlim([-15, 15])\n # ax1.set_ylim([-15, 15])\n # ax1.set_title(r'Airy Pattern | Slicer Mask %.1f mas' % scale_mas)\n #\n # img2 = ax2.imshow(aperture_mask * (np.abs(pup_grating)**2), extent=[-1, 1, -1, 1], cmap='bwr')\n # ax2.set_title(r'Pupil Plane | Aperture Mask')\n # ax2.set_xlim([-0.25, 0.25])\n # ax2.set_ylim([-0.25, 0.25])\n #\n # img3 = ax3.imshow(final_psf, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax3.set_xlabel(r'X [mas]')\n # ax3.set_ylabel(r'Y [mas]')\n # ax3.set_xlim([-15, 15])\n # ax3.set_ylim([-15, 15])\n # ax3.set_title(r'Diffraction Effects')\n # plt.show()\n\n return crop_psf",
"def scale(structure):\n from numpy.linalg import det\n if \"O\" in [atom.type for atom in structure]: spvol = 8.5**3/4e0\n elif \"Se\" in [atom.type for atom in structure]: spvol = 9.5**3/4e0\n elif \"Te\" in [atom.type for atom in structure]: spvol = 10.5**3/4e0\n else: raise ValueError(\"unknown atom.type: %s\" % (atom.type,))\n\n nfu = float(len(structure)/7)*0.5 # 0.5 because 2 f.u. in spinel unit-cell.\n vol = det(structure.cell)\n return (nfu * spvol / vol)**(1e0/3e0)",
"def apply_absorption_correction(qz, scale):\n global t\n global mu\n global wavelength\n for i in xrange(len(scale)):\n #a = wavelength * qz[i] / 4 / math.pi\n a = 1.18 * qz[i] / 4 / math.pi\n theta = math.asin(a)\n g = 2 / math.sin(theta)\n Ac = t * g / mu / (1-math.exp(-t*g/mu))\n scale[i] = Ac * scale[i]",
"def __bounds_check(self, *wavelengths: float):\n lowerb = self.spectrum[:, 0][0]\n upperb = self.spectrum[:, 0][-1]\n # See if the wavelength(s) is out of bounds, throw error\n for w in wavelengths:\n if not lowerb <= w <= upperb:\n print(\"Wavelength %0.2f nm out of spectra bounds\" % w)\n if w < lowerb:\n raise IndexError(\"Please use the lower bound of %0.2f nm.\" % lowerb)\n elif w > upperb:\n raise IndexError(\"Please use the upper bound of %0.2f nm.\" % upperb)\n else:\n pass\n return",
"def get_scale_parameter(self):\r\n \r\n if self.scale_parameter == 0.0: \r\n shape_in_gamma_func = float(1+(1/self.shape_parameter))\r\n gamma_func = special.gamma(shape_in_gamma_func)\r\n self.scale_parameter = (self.mean_fire_recurrence/gamma_func)\r\n return self.scale_parameter\r\n else:\r\n return self.scale_parameter",
"def RomanZuniga07(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def test_smoothing():\n spec = IGRINSSpectrum(file=file)\n new_spec = spec.remove_outliers(threshold=3)\n\n assert len(new_spec.flux) > 0\n assert new_spec.shape[0] <= spec.shape[0]\n assert new_spec.shape[0] > 0\n assert new_spec.mask is not None",
"def spectral_check(self, ):\r\n a, b = self.dfa, self.dfm.copy()\r\n b['ts_a']=a.ts\r\n b['flux_a'] = a.flux\r\n b['dflux'] = (b.flux-b.flux_a)/b.flux_unc\r\n b['eflux100_a'] = a.eflux100\r\n b['deflux'] = (b.eflux100-b.eflux100_a)/b.eflux100_unc\r\n b['pindex_a'] = a.pindex\r\n b['gdelta'] = (b.pindex-b.pindex_a)/b.pindex_unc\r\n self.dfm = b # since copy\r\n\r\n fig,axx = plt.subplots(1,2, figsize=(10,5), sharey=True)\r\n hkw = dict(bins=np.linspace(-5,5,51), histtype='step', lw=2, density=True)\r\n\r\n cut = (b.ts>50) & ~pd.isnull(b.deflux) & ~pd.isnull(b.gdelta) &\\\r\n (b.modelname==\"LogParabola\") & (b.pindex<3) & (b.pindex>0.5) &\\\r\n (b.e0>500) &(b.eflux100_unc>0) &(b.pindex_unc>0)\r\n self.check_total = sum(cut)\r\n for ax, title, val in zip(axx.flatten(), ['Energy Flux', 'Spectral index'], [b.deflux, b.gdelta]): \r\n\r\n df=val[cut]\r\n ax.hist(df.clip(-5,5), label='mean {:5.2f}\\nstd {:5.2f}'.format(df.mean(),df.std()), **hkw);\r\n ax.grid(alpha=0.5); \r\n x=np.linspace(-4,4)\r\n ax.plot(x, stats.norm.pdf(x), '--g' );\r\n ax.set(xlabel='normalized fit deviation', title=title, )\r\n ax.legend(loc='upper left',prop=dict(family='monospace'))\r\n fig.suptitle('Normalized devations of fit from model', fontsize=16);\r\n\r\n return fig",
"def analysis_function_rms_wfe(system, wave_idx, config, spaxels_per_slice, surface):\n\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n\n N_fields = sysField.NumberOfFields\n wavelength = system.SystemData.Wavelengths.GetWavelength(wave_idx).Wavelength\n\n fx_min, fy_min = sysField.GetField(1).X, sysField.GetField(1).Y\n fx_max, fy_max = sysField.GetField(3).X, sysField.GetField(3).Y\n\n # This assumes Rectangular Normalization\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(3)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(3)])\n\n # Normalized field coordinates (hx, hy)\n hx_min, hx_max = fx_min / X_MAX, fx_max / X_MAX\n hy_min, hy_max = fy_min / Y_MAX, fy_max / Y_MAX\n\n # print(\"h_x : (%.3f, %.3f)\" % (hx_min, hx_max))\n # print(\"h_y : (%.3f, %.3f)\" % (hy_min, hy_max))\n\n hx = np.linspace(hx_min, hx_max, spaxels_per_slice)\n hy = np.linspace(hy_min, hy_max, spaxels_per_slice)\n\n # The Field coordinates for the Object\n obj_xy = np.array([X_MAX * hx, Y_MAX * hy]).T\n RMS_WFE = np.empty(spaxels_per_slice)\n foc_xy = np.empty((spaxels_per_slice, 2))\n global_xy = np.empty((spaxels_per_slice, 2))\n local_xyz = np.empty((spaxels_per_slice, 3))\n\n raytrace = system.Tools.OpenBatchRayTrace()\n normUnPolData = raytrace.CreateNormUnpol(spaxels_per_slice, constants.RaysType_Real, surface)\n\n theMFE = system.MFE\n nops = theMFE.NumberOfOperands\n theMFE.RemoveOperandsAt(1, nops)\n # build merit function\n op = theMFE.GetOperandAt(1)\n op.ChangeType(constants.MeritOperandType_CONF)\n op.GetOperandCell(constants.MeritColumn_Param1).Value = config\n\n # Pupil Sampling (4 means an 8x8 pupil grid)\n samp = 4\n wfe_op = constants.MeritOperandType_RWRE\n\n # Loop over all Spaxels in the Slice\n for i, (h_x, h_y) in enumerate(zip(hx, hy)):\n\n # operand = constants.MeritOperandType_RWRE\n # rms = system.MFE.GetOperandValue(operand, surface, wave_idx, h_x, h_y, 0.0, 0.0, 0.0, 0.0)\n # RMS_WFE[i] = wavelength * 1e3 * rms # We assume the Wavelength comes in Microns\n\n op = theMFE.AddOperand()\n op.ChangeType(wfe_op)\n op.GetOperandCell(constants.MeritColumn_Param1).Value = int(samp)\n op.GetOperandCell(constants.MeritColumn_Param2).Value = int(wave_idx)\n op.GetOperandCell(constants.MeritColumn_Param3).Value = float(h_x)\n op.GetOperandCell(constants.MeritColumn_Param4).Value = float(h_y)\n op.GetOperandCell(constants.MeritColumn_Weight).Value = 0\n\n # Add the ray to the RayTrace\n normUnPolData.AddRay(wave_idx, h_x, h_y, 0, 0, constants.OPDMode_None)\n\n # update merit function\n theMFE.CalculateMeritFunction()\n # retrieve value of each RWRE operand\n # theMCE.SetCurrentConfiguration(i)\n system.MCE.SetCurrentConfiguration(config)\n # print(\"N operands:\", nops)\n for irow in range(2, theMFE.NumberOfOperands + 1):\n op = theMFE.GetOperandAt(irow)\n rms = op.Value\n # print(irow)\n # print(\"RMS: %.2f nm\" % (wavelength * 1e3 * rms))\n RMS_WFE[irow - 2] = wavelength * 1e3 * rms # We assume the Wavelength comes in Microns\n\n # Run the RayTrace for the whole Slice\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n normUnPolData.StartReadingResults()\n for i in range(spaxels_per_slice):\n output = normUnPolData.ReadNextResult()\n if output[2] == 0:\n local_xyz[i, 0] = output[4]\n local_xyz[i, 1] = output[5]\n local_xyz[i, 2] = output[6]\n\n # Local Focal X Y\n foc_xy[i, 0] = output[4]\n foc_xy[i, 1] = output[5]\n\n # 
print(\"\\nConfiguration #%d\" % config)\n # print(\"fx: %.5f fy: %.5f\" % (X_MAX * hx[i], Y_MAX * hy[i]))\n # print(\"hx: %.5f hy: %.5f\" % (hx[i], hy[i]))\n # x_det, y_det = output[4], output[5]\n # print(\"x_det: %.3f y_det: %.3f\" % (x_det, y_det))\n\n # elif output[2] == 0 and output[3] != 0:\n # vignetting = output[3]\n # surf_name = system.LDE.GetSurfaceAt(vignetting).Comment\n # print(\"\\nWARNING: Vignetting at Surface #%d, Name %s\" % (vignetting, surf_name))\n # print(\"Wavelength: %.3f | Configuration #%d\" % (wavelength, config))\n # print(\"Field #%d | fx: %.5f fy: %.5f\" % (i + 1, X_MAX * hx[i], Y_MAX * hy[i]))\n # print(\"hx: %.5f hy: %.5f\" % (hx[i], hy[i]))\n # x_det, y_det = output[4], output[5]\n # print(\"x_det: %.3f y_det: %.3f\" % (x_det, y_det))\n #\n # # Add the local coordinates despite the vignetting\n # local_xyz[i, 0] = output[4]\n # local_xyz[i, 1] = output[5]\n # local_xyz[i, 2] = output[6]\n\n\n normUnPolData.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n\n # # Get the transformation from Local to Global Coordinates\n # global_mat = system.LDE.GetGlobalMatrix(surface)\n # R11, R12, R13, R21, R22, R23, R31, R32, R33, X0, Y0, Z0 = global_mat[1:]\n # global_matrix = np.array([[R11, R12, R13],\n # [R21, R22, R23],\n # [R31, R32, R33]])\n # offset = np.array([X0, Y0, Z0])\n #\n # # Transform from Local to Global and only save X and Y\n # global_xyz = (np.dot(global_matrix, local_xyz.T)).T + offset\n # global_xy = global_xyz[:, :2]\n\n return [RMS_WFE, obj_xy, foc_xy, global_xy]",
"def perform_point_interpolation(sub_sample_wvl, sub_sample_rad, center_wv):\n # let us define spectral resolution\n\n print(center_wv)\n dframe = pd.DataFrame()\n\n sampled_wvl = np.arange(min(sub_sample_wvl), max(sub_sample_wvl), 2)\n fit_params = interp1d(sub_sample_wvl, sub_sample_rad, kind='slinear')\n fitted_val = fit_params(sampled_wvl)\n dframe['wavelength'] = sampled_wvl\n dframe['rad'] = fitted_val\n return dframe",
"def aperture_phot(self,data,x,y,v):\n r = np.sqrt((x-self.avg_map_fits['Values'][1])**2 + (y-self.avg_map_fits['Values'][3])**2)\n \n inner = (r < 8./60.) & np.isfinite(data) \n outer = (r > 8.5/60.) & (r < 12./60.) & np.isfinite(data)\n\n annu = np.nanmedian(data[outer])\n annu_rms = np.nanstd(data[outer])\n flux = np.sum(data[inner]) - annu*np.sum(inner)\n\n c = 3e8\n kb=1.38e-23\n beam = (1./60.*np.pi/180.)**2\n factor = 2*kb*(v*1e9/c)**2 * beam * 1e26\n return flux*factor, annu_rms*np.sqrt(np.sum(inner))*factor",
"def scale(arrayin,Amin,Amax,mask=None):\r\n if (mask==None) and (arrayin.max() - arrayin.min())!=0.0 :\r\n Bmax = arrayin.max()\r\n Bmin = arrayin.min()\r\n elif (arrayin.max() - arrayin.min())!=0.0 :\r\n ny = arrayin.shape[0]\r\n nx = arrayin.shape[1]\r\n Bmax = arrayin.min()\r\n Bmin = arrayin.max()\r\n for i in range(ny):\r\n for j in range(ny):\r\n if mask[i,j] > 0.5e0 :\r\n if arrayin[i,j] < Bmin :\r\n Bmin = arrayin[i,j]\r\n if arrayin[i,j] > Bmax :\r\n Bmax = arrayin[i,j]\r\n else :\r\n print \"andrew.bagOfns.scale : warning (arrayin.max() - arrayin.min())=0.0 \"\r\n return np.copy(arrayin)\r\n\r\n arrayout = (arrayin - Bmin)*(Amax - Amin) / (Bmax - Bmin) + Amin\r\n return arrayout",
"def scaleto11(val,check=True):\n if type(val) is not list and type(val) is not np.ndarray:\n val = [val]\n if type(val) is list:\n val = np.array(val)\n assert type(val) is np.ndarray\n assert not check or np.all((val==0) + (val==1))\n return val*2-1",
"def test_sphere(self):\n fun = get_problem('sphere', self.dimension)\n self.assertEqual(fun(self.array), 0.0)",
"def magnitude_too_small(mag, lon, lat, config):\n pt = Point((lon, lat))\n for boxname in sorted(config['boxes']):\n boxdict = config['boxes'][boxname]\n if pt.within(boxdict['poly']):\n if mag >= boxdict['mag']:\n return False\n else:\n return True\n #\n # Not in any boxes\n #\n if mag >= config['minmag']:\n return False\n\n return True",
"def evaluate_peak_norm(x, y, amplitude, x_0, y_0, r_0):\n rr = (x - x_0) ** 2 + (y - y_0) ** 2\n rr_0 = r_0 ** 2\n\n # Because np.select evaluates on the whole rr array\n # we have to catch the invalid value warnings\n with np.errstate(invalid='ignore'):\n values = np.select([rr <= rr_0, rr > rr_0], [np.sqrt(rr_0 - rr), 0])\n return amplitude * values / r_0",
"def test_equivalent_width():\n\n spec = IGRINSSpectrum(file=file)\n mu = np.median(spec.wavelength.value)\n equivalent_width = spec.measure_ew(mu)\n\n assert equivalent_width is not None\n assert type(equivalent_width) is not int\n assert type(equivalent_width) is astropy.units.quantity.Quantity\n new_unit = equivalent_width.to(spec.wavelength.unit)\n assert new_unit.unit == spec.wavelength.unit",
"def plotBeamSize( self, Srange, eps, delP, plane = 'x', scaleXY = 1e2, save = 0 ):\n from Tools import sigm\n from VisualSpecs import myColors as colors \n from VisualSpecs import align_yaxis\n\n condition = (self.df.S > self.Smax - Srange) & (self.df.S <= self.Smax)\n slFr = self.df[condition]\n print('slected last', Srange, 'm upstream. Scale factor =', scaleXY)\n # init the plot and split x\n #\n fig = plt.figure( figsize = (20,10) ); ax = fig.add_subplot(111)\n twin = ax.twinx()\n\n # plot physical aperture\n #\n maxAper = self.df.APER.max()\n print('maximum aperture found:', maxAper)\n\n ax.plot( slFr.S, slFr.APER*scaleXY, lw = 3., color = colors[11] )\n ax.plot( slFr.S, -slFr.APER*scaleXY, lw = 3., color = colors[11] )\n ax.set_ylabel('aperture [cm]'); ax.set_ylim( -(maxAper+maxAper/10)*scaleXY, (maxAper+maxAper/10)*scaleXY )\n\n \n twin.set_ylabel('beam size $\\\\sigma$ [cm]')\n \n if plane == 'x':\n\n twin.plot( slFr.S, sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[2], label = '$\\\\sigma_x$' ) \n twin.plot( slFr.S, -sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[2] )\n\n twin.plot( slFr.S, 10*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[3], ls = '--', label = '10$\\\\sigma_x$') \n twin.plot( slFr.S, -10*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[3], ls = '--' ) # \n\n twin.plot( slFr.S, 20*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[4], ls = ':', label = '20$\\\\sigma_x$' ) \n twin.plot( slFr.S, -20*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[4], ls = ':' ) # \n align_yaxis(ax, 0, twin, 0); twin.set_ylim( -(maxAper+maxAper/10)*scaleXY, (maxAper+maxAper/10)*scaleXY ) \n\n plt.legend() \n plt.title('horizontal beam size and physical aperture')\n if save: print('saving fig ...'); plt.savefig( self.plotpath + 'physAprt_hrzt_beamSize100m.pdf', bbox_inches = 'tight', dpi = 70)\n \n else:\n\n twin.plot( slFr.S, sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[2], label = '$\\\\sigma_y$' ) \n twin.plot( slFr.S, -sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[2] )\n\n twin.plot( slFr.S, 10*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[3], ls = '--', label = '10$\\\\sigma_y$') \n twin.plot( slFr.S, -10*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[3], ls = '--' ) # \n\n twin.plot( slFr.S, 20*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[4], ls = ':', label = '20$\\\\sigma_y$' ) \n twin.plot( slFr.S, -20*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[4], ls = ':' ) # \n align_yaxis(ax, 0, twin, 0); twin.set_ylim( -(maxAper+maxAper/10)*scaleXY, (maxAper+maxAper/10)*scaleXY )\n\n plt.legend()\n plt.title('vertical beam size and physical aperture')\n if save: print('saving fig ...'); plt.savefig( self.plotpath + 'physAprt_vrt_beamSize100m.pdf', bbox_inches = 'tight', dpi = 70)\n\n return fig",
"def absorbance( self, lmin=0, lmax=0 ):\n A = self.prop[\"SPEC\"][:,1]\n if lmax>0:\n m = np.vstack( (self.wavelength(), A) ).T # matrix w lambda and absorbance\n m = m[ m[:,0] >= lmin ] # slice by wavelength...\n m = m[ m[:,0] <= lmax ]\n return np.average( m[:,1] ) # scalar\n return A # array",
"def _curve_constrain(self, x, idx, sign):\n x = x.reshape(1, -1)\n pdf = np.exp(self.ks_gaussian.score_samples(x))\n if self.band[0] < pdf < self.band[1]:\n value = sign * self.pca.inverse_transform(x)[0][idx]\n else:\n value = 1E6\n return value",
"def scale(x: np.ndarray) -> tuple[FloatArray, BoolArray]:\n logx = np.log2(x + 1)\n mask_1d = ~np.isclose(np.nanstd(logx, axis=0), 0.0)\n scaled_x = standardize(logx[:, mask_1d], axis=0)\n scaled_x[np.isnan(scaled_x)] = 0\n return scaled_x, mask_1d",
"def _eta_sfr_scaling(self,x,q):\n i = self.enum[q]\n A = self.scaling_params['A'][i]\n b = self.scaling_params['b'][i]\n return A*x**b",
"def sbound(self, u, s):\n sele = u.select_atoms(s)\n calc = u.select_atoms('name CAL')\n \n dist = MDAnalysis.analysis.distances.distance_array(calc.coordinates(), sele.coordinates())\n for i, row in enumerate(dist):\n \n if any([d<2.5 for d in row]):\n\treturn (True, i)\n return (False, -1)",
"def get_scale():\r\n\r\n \r\n return 0.5",
"def reScaleLandsat(self,img):\n \n\t\tthermalBand = ee.List(['thermal'])\n\t\tthermal = ee.Image(img).select(thermalBand).multiply(10)\n \n\t\totherBands = ee.Image(img).bandNames().removeAll(thermalBand)\n\t\tscaled = ee.Image(img).select(otherBands).divide(0.0001)\n \n\t\timage = ee.Image(scaled.addBands(thermal)).int16()\n \n\t\treturn image.copyProperties(img)",
"def run(self):\n #calculate platescale of first input image\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.cd)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.pc)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n pscale = self.datain[0].header['PIXSCAL']\n #filtering out images which are too far away from the others\n #passing images added to a list of (image, WCS) tuples\n '''\n image_centers = []\n for f in self.datain:\n image_centers.append((f.header['CRVAL1'], f.header['CRVAL2']))\n filtered_datain = []\n dist_list = [[[0]*(len(image_centers)-1)]*len(image_centers)]\n for i in range(len(image_centers)):\n for j in range(len(image_centers)-1):\n dist_list[i][j+1] = np.sqrt((image_)**2+()**2)\n '''\n #calculations necessary for updating wcs information\n px = []\n py = []\n \n #in order to avoid NaN interactions, creating weight map\n weights=[]\n for f in self.datain:\n weights.append((np.where(np.isnan(f.image) == True, 0, 1)))\n \n for f in self.datain:\n px.extend(wcs.WCS(f.header).calc_footprint()[:,0])\n py.extend(wcs.WCS(f.header).calc_footprint()[:,1])\n x0 = (max(px)+min(px))/2.\n y0 = (max(py)+min(py))/2.\n sx = (max(px)-min(px))*np.cos(y0/180*np.pi) # arcsec\n sy = (max(py)-min(py)) # arcsec\n size = (sx*3600+self.getarg('pad')*2, sy*3600+self.getarg('pad')*2)\n xpix = size[0]//pscale\n ypix = size[1]//pscale\n cdelt = [pscale/3600.]*2\n \n #create self.dataout and give it a copy of an input's header\n self.dataout = DataFits(config = self.config)\n self.dataout.header = self.datain[0].header.copy()\n \n #update header wcs information\n self.log.info('Creating new WCS header')\n \n self.dataout.header['CRPIX1'] = xpix/2\n self.dataout.header['CRPIX2'] = ypix/2\n self.dataout.header['CRVAL1'] = x0\n self.dataout.header['CRVAL2'] = y0\n self.dataout.header['CD1_1'] = -cdelt[0]\n self.dataout.header['CD1_2'] = self.dataout.header['CD2_1'] = 0.\n self.dataout.header['CD2_2'] = cdelt[1]\n self.dataout.header['NAXIS1'] = int(xpix)\n self.dataout.header['NAXIS2'] = int(ypix)\n self.dataout.header['CTYPE1'] = 'RA---TAN-SIP'\n self.dataout.header['CTYPE2'] = 'DEC--TAN-SIP'\n self.dataout.header['RADESYS'] = 'ICRS'\n self.dataout.header['EQUINOX'] = 2000\n self.dataout.header['LATPOLE'] = self.datain[0].header['CRVAL2']\n self.dataout.header['LONPOLE'] = 180\n self.dataout.header['PIXASEC'] = pscale\n \n theta_rad = np.deg2rad(self.getarg('outangle'))\n rot_matrix = np.array([[np.cos(theta_rad), -np.sin(theta_rad)], \n [np.sin(theta_rad), np.cos(theta_rad)]])\n rot_cd = np.dot(rot_matrix, np.array([[self.dataout.header['CD1_1'], 0.],[0., self.dataout.header['CD2_2']]]))\n for i in [0,1]:\n for j in [0,1]:\n self.dataout.header['CD{0:d}_{1:d}'.format(i+1, j+1)] = rot_cd[i,j]\n \n #check drizzle arguments\n if self.getarg('kernel') == 'smoothing':\n kernel = 'lanczos3'\n elif self.getarg('kernel') in ['square', 'point', 'gaussian', 'tophat']:\n kernel = self.getarg('kernel')\n else:\n self.log.error('Kernel name not recognized, using default')\n kernel = 'square'\n if self.getarg('drizzleweights') == 'uniform':\n driz_wt = ''\n elif self.getarg('drizzleweights') in ['exptime', 'expsq']:\n driz_wt = self.getarg('drizzleweights')\n else:\n self.log.error('Drizzle weighting not recognized, using default')\n driz_wt = ''\n \n #create drizzle object and add input images\n fullwcs = wcs.WCS(self.dataout.header)\n self.log.info('Starting drizzle')\n driz = drz.Drizzle(outwcs = fullwcs, 
pixfrac=self.getarg('pixfrac'), \\\n kernel=kernel, fillval='10000', wt_scl=driz_wt)\n for i,f in enumerate(self.datain):\n self.log.info('Adding %s to drizzle stack' % f.filename)\n driz.add_image(f.imgdata[0], wcs.WCS(f.header), inwht=weights[i])\n \n try:\n fillval=float(self.getarg('fillval'))\n except:\n fillval=np.nan\n self.log.error('Fillvalue not recognized or missing, using default')\n \n #creates output fits file from drizzle output\n self.dataout.imageset(np.where(driz.outsci == 10000, fillval, driz.outsci))\n self.dataout.imageset(driz.outwht,'OutWeight', self.dataout.header)\n self.dataout.filename = self.datain[0].filename\n\n #add history\n self.dataout.setheadval('HISTORY','Coadd: %d files combined with %s kernel, pixfrac %f at %f times resolution' \\\n % (len(self.datain), kernel, self.getarg('pixfrac'), self.getarg('resolution')))",
"def pdf(self, x, *args, **kwds):\n args, loc, scale = self._parse_args(*args, **kwds)\n x, loc, scale = map(np.asarray, (x, loc, scale))\n args = tuple(map(np.asarray, args))\n dtyp = np.find_common_type([x.dtype, np.float64], [])\n x = np.asarray((x - loc) / scale, dtype=dtyp)\n cond0 = self._argcheck(*args) & (scale > 0)\n cond1 = self._support_mask(x) & (scale > 0)\n cond = cond0 & cond1\n output = np.zeros(np.shape(cond), dtyp)\n np.putmask(output, (1 - cond0) + np.isnan(x), self.badvalue)\n if np.any(cond):\n goodargs = argsreduce(cond, *((x,) + args + (scale,)))\n s, goodargs = goodargs[-1], goodargs[:-1]\n # use trapezoidal integration rule to estimate normalization factor\n # # end = (np.max(x) + np.max(goodargs[1]) + 2 * np.max(goodargs[2]) + 1) * 4\n #\n # end = np.max([np.max(x) + np.max(goodargs[2]), 1000])\n # num_segments = int(end * 1.666)\n # r = np.linspace(self.a + 1e-07,\n # end,\n # num_segments)\n # norm_scale = np.array([scale[0]] * num_segments)\n # norm_args = [np.array([arg[0]] * num_segments) for arg in goodargs]\n # len_scale = len(scale)\n # scale = norm_scale * np.trapz(self._pdf(r, *norm_args[1:]), r)[:len_scale]\n mu = goodargs[1]\n b = goodargs[2]\n s = 1 - 0.5 * np.exp((0 - mu) / b)\n np.place(output, cond, self._pdf(*goodargs) / s)\n if output.ndim == 0:\n return output[()]\n return output",
"def isSetScale(self):\n return _libsbml.Unit_isSetScale(self)",
"def Damineli16(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def get_scale(units, compartmentId, volume, extracellularVolume):\r\n if compartmentId == 'c':\r\n V = volume\r\n else:\r\n V = extracellularVolume\r\n\r\n if units == 'uM':\r\n return 1. / N_AVOGADRO / V * 1e6\r\n elif units == 'mM':\r\n return 1. / N_AVOGADRO / V * 1e3\r\n elif units == 'molecules':\r\n return 1.\r\n else:\r\n raise Exception('Invalid units \"%s\"' % units)",
"def get_scale_parameter(self):\n\n if self.scale_parameter == 0.0:\n shape_in_gamma_func = float(1+(1/self.shape_parameter))\n gamma_func = special.gamma(shape_in_gamma_func)\n self.scale_parameter = (self.mean_fire_recurrence/gamma_func)\n return self.scale_parameter\n else:\n return self.scale_parameter",
"def test_compute_spectral_norms(self):\n\t\tdetails = self.watcher.analyze(layers=[self.second_layer], pool=False, randomize=False, plot=False, mp_fit=False, svd_method=ACCURATE_SVD)\n\n\t\t# SLOW method\n\t\ta = details.spectral_norm.to_numpy()\n\t\tself.assertAlmostEqual(a[0],20.2149, places=3)\n\t\tself.assertAlmostEqual(a[1],24.8158, places=3)\n\t\tself.assertAlmostEqual(a[2],19.3795, places=3)",
"def interpolate_variable(self, wavelengths, apertures):\n\n if self.n_ap == 1:\n return self.flux[0, :]\n\n sed_apertures = self.apertures.to(u.au).value\n sed_wav = self.wav.to(u.micron).value\n\n # If any apertures are larger than the defined max, reset to max\n apertures[apertures > sed_apertures.max()] = sed_apertures.max() * 0.999\n\n # If any apertures are smaller than the defined min, raise Exception\n if np.any(apertures < sed_apertures.min()):\n raise Exception(\"Aperture(s) requested too small\")\n\n # Find wavelength order\n order = np.argsort(wavelengths)\n\n # Interpolate apertures vs wavelength\n log10_ap_interp = interp1d(np.log10(wavelengths[order]), np.log10(apertures[order]), bounds_error=False, fill_value=np.nan)\n\n # Create interpolating function\n flux_interp = interp1d(sed_apertures, self.flux.swapaxes(0, 1))\n\n # Interpolate the apertures\n apertures = 10. ** log10_ap_interp(np.log10(sed_wav))\n\n # Extrapolate on either side\n apertures[np.log10(sed_wav) < log10_ap_interp.x[0]] = 10. ** log10_ap_interp.y[0]\n apertures[np.log10(sed_wav) > log10_ap_interp.x[-1]] = 10. ** log10_ap_interp.y[-1]\n\n # Interpolate and return only diagonal elements\n return flux_interp(apertures).diagonal()",
"def zonotope_inside_scale(z,Y):\n model=Model(\"inside_scale\")\n n,N=Y.shape\n p=np.empty((z.G.shape[1],N),dtype='object')\n scale=model.addVar(obj=1)\n for row in range(p.shape[0]):\n for column in range(N):\n p[row,column]=model.addVar(lb=-GRB.INFINITY,ub=GRB.INFINITY)\n model.update()\n for row in range(p.shape[0]):\n for column in range(N):\n model.addConstr(p[row,column]<=scale)\n model.addConstr(-p[row,column]<=scale)\n constraints_AB_eq_CD(model,np.eye(n),Y-z.x,z.G,p)\n model.setParam('OutputFlag', 0)\n model.optimize()\n return scale.X",
"def beam_radius(self, x, Amp, beam_type='vortex', Amp_Flag=True):\r\n \r\n# dx = x[[0],[1]]-x[[0],[0]]\r\n# \r\n# Intensity = (Amp*Amp.conjugate()).real\r\n# N,N = Amp.shape\r\n# \r\n# if beam_type == 'vortex':\r\n# \r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity)\r\n# \r\n# elif beam_type == 'gauss':\r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity,value=np.max(Intensity)/np.e**2)\r\n# \r\n# # cartesian coordinate only;\r\n# radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n# \r\n# return radius\r\n \r\n dx = x[[0],[1]]-x[[0],[0]]\r\n \r\n if Amp_Flag:\r\n Intensity = (Amp*Amp.conjugate()).real\r\n else:\r\n Intensity = Amp\r\n \r\n N,N = Amp.shape\r\n \r\n if beam_type == 'vortex':\r\n \r\n radius = 0\r\n Max = np.max(Intensity)\r\n \r\n NumofDots = 0\r\n \r\n for i in range(N):\r\n for j in range(N):\r\n if Intensity[i,j] > math.floor(Max*1e8)/1e8:\r\n radius += np.sqrt(((i-N/2)*dx)**2+((j-N/2)*dx)**2)\r\n NumofDots += 1\r\n \r\n radius = radius/NumofDots\r\n \r\n elif beam_type == 'gauss':\r\n \r\n m,n = self.getPositon(Intensity, value = np.max(Intensity)/np.e**2)\r\n # appropriate for cartesian coordinate only;\r\n radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n \r\n return radius*2",
"def resolution(self, radius, wave = None):\n dev = Prism.minDeviation(self,wave)\n alpha = dev/2 + self.angle/2\n\n # Form path difference between top and bottom of the beam\n d = 4*radius*math.sin(self.angle/2)/math.cos(alpha)\n dmax = 2.0*self.height*math.tan(self.angle/2) # Length of bottom of prism\n if d > dmax:\n d = dmax\n print(\"Resolution limited by size of prism\")\n\n\n dn = self.n.getDerivative(wave) # dn/d lambda\n return 1000*d*dn # scale to microms",
"def testKnown(self):\n numAmps = (2, 2)\n bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(4, 4))\n # make a 4x4 image with 4 identical 2x2 subregions that flatten to -1, 0, 1, 2\n im = afwImage.ImageF(bbox)\n imArr = im.getArray()\n imArr[:, :] = np.array(((-1, 0, -1, 0),\n (1, 2, 1, 2),\n (-1, 0, -1, 0),\n (1, 2, 1, 2)), dtype=imArr.dtype)\n\n sqCoeffs = np.array(((0, 0.11), (-0.15, -12)))\n detector = self.makeDetector(bbox=bbox, numAmps=numAmps, sqCoeffs=sqCoeffs)\n ampInfoCat = detector.getAmpInfoCatalog()\n\n linSq = LinearizeSquared()\n linSq(im, detector=detector)\n\n # amp 0 has 0 squared coefficient and so makes no correction\n imArr0 = im.Factory(im, ampInfoCat[0].getBBox()).getArray()\n linCoeff0 = ampInfoCat[0].getLinearityCoeffs()[0]\n self.assertEqual(0, linCoeff0)\n self.assertFloatsAlmostEqual(imArr0.flatten(), (-1, 0, 1, 2))\n\n # test all amps\n for ampInfo in ampInfoCat:\n imArr = im.Factory(im, ampInfo.getBBox()).getArray()\n linCoeff = ampInfo.getLinearityCoeffs()[0]\n expect = np.array((-1 + linCoeff, 0, 1 + linCoeff, 2 + 4*linCoeff), dtype=imArr.dtype)\n self.assertFloatsAlmostEqual(imArr.flatten(), expect)",
"def _check_scale_factor(\n spatial_data: Optional[Mapping],\n img_key: Optional[str],\n scale_factor: Optional[float],\n) -> float:\n if scale_factor is not None:\n return scale_factor\n elif spatial_data is not None and img_key is not None:\n return spatial_data[\"scalefactors\"][f\"tissue_{img_key}_scalef\"]\n else:\n return 1.0",
"def test_spectral_density_vega_wf(wf, fluxd, to):\n v = fluxd.to(to.unit, spectral_density_vega(wf))\n assert v.unit == to.unit\n if to.unit in (VEGAmag, JMmag):\n assert np.isclose(v.value, to.value, atol=0.001)\n else:\n assert np.isclose(v.value, to.value, rtol=0.001)",
"def scale(c,v,p):\n scaleval = min([coeff.valuation(p) for coeff in c.coefficients()])\n if scaleval > 0:\n c = c/(p**scaleval)\n v = v - scaleval\n if v <= 0:\n flag = False\n else:\n flag = True\n return [flag,c,v]",
"def plate_scale(platescale):\n if platescale.unit.is_equivalent(si.arcsec / si.m):\n platescale_val = platescale.to_value(si.radian / si.m)\n elif platescale.unit.is_equivalent(si.m / si.arcsec):\n platescale_val = (1 / platescale).to_value(si.radian / si.m)\n else:\n raise UnitsError(\"The pixel scale must be in angle/distance or distance/angle\")\n\n return Equivalency(\n [(si.m, si.radian, lambda d: d * platescale_val, lambda a: a / platescale_val)],\n \"plate_scale\",\n {\"platescale\": platescale},\n )",
"def getScale(self, mode='ACC'):\t#good\r\n\t\tif mode.upper() == 'ACC':\r\n\t\t\treg = 0x1C\r\n\t\telif mode.upper() == 'GYR':\r\n\t\t\treg = 0x1B\t\t\r\n\t\telse:\r\n\t\t\treturn False\r\n\t\tcurrentVal = self.read(reg)\r\n\t\tcurrentVal = self.dec2BinList(currentVal)\r\n\t\tscaleSetting = (currentVal[4]*2) + (currentVal[3]*1) \r\n\t\tif mode.upper() == 'ACC':\r\n\t\t\tscale = 2**(scaleSetting+1) \r\n\t\telif mode.upper() == 'GYR':\r\n\t\t\tscale = (2**(scaleSetting+1))*125\r\n\t\telse:\r\n\t\t\treturn False\r\n\t\treturn scale,scaleSetting",
"def sigma_R(field, scale):\n field_filtered = filter_Field(field, tophat_kernel, (scale,))\n return field_filtered.t.std()",
"def test_SMEL_args():\n testing_function('sme', bilinear=False)",
"def Cardelli89(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def test_spectral_density_vega_bp(filename, fluxd, to, tol):\n fn = get_pkg_data_filename(os.path.join(\n '..', '..', 'photometry', 'data', filename))\n bp = synphot.SpectralElement.from_file(fn)\n\n v = fluxd.to(to.unit, spectral_density_vega(bp))\n assert v.unit == to.unit\n if to.unit in (VEGAmag, JMmag):\n assert np.isclose(v.value, to.value, atol=tol)\n else:\n assert np.isclose(v.value, to.value, rtol=tol)",
"def jam_axi_rms(surf_lum, sigma_lum, qobs_lum, surf_pot, sigma_pot, qobs_pot,\n inc, mbh, distance, xbin, ybin, ml=None, normpsf=1., pixang=0.,\n pixsize=0., plot=True, rms=None, erms=None, sigmapsf=0.,\n goodbins=None, quiet=False, beta=None, step=0., nrad=20,\n nang=10, rbh=0.01, tensor='zz', vmin=None, vmax=None, **kwargs):\n if beta is None:\n beta = np.zeros_like(surf_lum) # Anisotropy parameter beta = 1 - (sig_z/sig_R)**2\n if not (surf_lum.size == sigma_lum.size == qobs_lum.size == beta.size):\n raise ValueError(\"The luminous MGE components do not match\")\n if not (surf_pot.size == sigma_pot.size == qobs_pot.size):\n raise ValueError(\"The total mass MGE components do not match\")\n if xbin.size != ybin.size:\n raise ValueError(\"xbin and ybin do not match\")\n if rms is not None:\n if erms is None:\n erms = np.full_like(rms, np.median(rms)*0.05) # Constant ~5% errors\n if goodbins is None:\n goodbins = np.ones_like(rms, dtype=bool)\n elif goodbins.dtype != bool:\n raise ValueError(\"goodbins must be a boolean vector\")\n if not (xbin.size == rms.size == erms.size == goodbins.size):\n raise ValueError(\"(rms, erms, goodbins) and (xbin, ybin) do not match\")\n\n sigmapsf = np.atleast_1d(sigmapsf)\n normpsf = np.atleast_1d(normpsf)\n if sigmapsf.size != normpsf.size:\n raise ValueError(\"sigmaPSF and normPSF do not match\")\n\n pc = distance*np.pi/0.648 # Constant factor to convert arcsec --> pc\n\n surf_lum_pc = surf_lum\n surf_pot_pc = surf_pot\n sigma_lum_pc = sigma_lum*pc # Convert from arcsec to pc\n sigma_pot_pc = sigma_pot*pc # Convert from arcsec to pc\n xbin_pc = xbin*pc # Convert all distances to pc\n ybin_pc = ybin*pc\n pixSize_pc = pixsize*pc\n sigmaPsf_pc = sigmapsf*pc\n step_pc = step*pc\n\n # Add a Gaussian with small sigma and the same total mass as the BH.\n # The Gaussian provides an excellent representation of the second moments\n # of a point-like mass, to 1% accuracy out to a radius 2*sigmaBH.\n # The error increses to 14% at 1*sigmaBH, independently of the BH mass.\n #\n if mbh > 0:\n sigmaBH_pc = rbh*pc # Adopt for the BH just a very small size\n surfBH_pc = mbh/(2*np.pi*sigmaBH_pc**2)\n surf_pot_pc = np.append(surfBH_pc, surf_pot_pc) # Add Gaussian to potential only!\n sigma_pot_pc = np.append(sigmaBH_pc, sigma_pot_pc)\n qobs_pot = np.append(1., qobs_pot) # Make sure vectors do not have extra dimensions\n\n qobs_lum = qobs_lum.clip(0, 0.999)\n qobs_pot = qobs_pot.clip(0, 0.999)\n\n t = clock()\n rmsModel = _vrms2(xbin_pc, ybin_pc, inc, surf_lum_pc, sigma_lum_pc,\n qobs_lum, surf_pot_pc, sigma_pot_pc, qobs_pot, beta,\n tensor, sigmaPsf_pc, normpsf, pixSize_pc, pixang,\n step_pc, nrad, nang)\n if not quiet:\n print('jam_axi_rms elapsed time sec: %.2f' % (clock() - t))\n\n if tensor in ('xx', 'yy', 'zz'):\n rmsModel = np.sqrt(rmsModel.clip(0)) # Return SQRT and fix possible rounding errors\n if tensor in ('xy', 'xz'):\n rmsModel *= np.sign(xbin*ybin) # Calculation was done in positive quadrant\n\n # Analytic convolution of the MGE model with an MGE circular PSF\n # using Equations (4,5) of Cappellari (2002, MNRAS, 333, 400)\n #\n lum = surf_lum_pc*qobs_lum*sigma_lum**2 # Luminosity/(2np.pi) of each Gaussian\n flux = np.zeros_like(xbin) # Total MGE surface brightness for plotting\n for sigp, norp in zip(sigmapsf, normpsf): # loop over the PSF Gaussians\n sigmaX = np.sqrt(sigma_lum**2 + sigp**2)\n sigmaY = np.sqrt((sigma_lum*qobs_lum)**2 + sigp**2)\n surfConv = lum / (sigmaX*sigmaY) # PSF-convolved in Lsun/pc**2\n for srf, sx, sy in zip(surfConv, sigmaX, sigmaY): 
# loop over the galaxy MGE Gaussians\n flux += norp*srf*np.exp(-0.5*((xbin/sx)**2 + (ybin/sy)**2))\n\n if rms is None:\n\n chi2 = None\n if ml is None:\n ml = 1.\n else:\n rmsModel *= np.sqrt(ml)\n\n else:\n\n if (ml is None) or (ml <= 0):\n\n # y1, dy1 = rms, erms # (y1 are the data, y2 the model)\n # scale = sum(y1*y2/dy1**2)/sum(y2**2/dy1**2) # (equation 51)\n #\n ml = (np.sum(rms[goodbins]*rmsModel[goodbins]/erms[goodbins]**2)\n / np.sum((rmsModel[goodbins]/erms[goodbins])**2))**2\n\n rmsModel *= np.sqrt(ml)\n chi2 = np.sum(((rms[goodbins]-rmsModel[goodbins])/erms[goodbins])**2) / goodbins.sum()\n\n if not quiet:\n print('inc=%.1f beta_z=%.2f M/L=%.3g BH=%.2e chi2/DOF=%.3g' % (inc, beta[0], ml, mbh*ml, chi2))\n mass = 2*np.pi*surf_pot_pc*qobs_pot*sigma_pot_pc**2\n print('Total mass MGE: %.4g' % np.sum(mass*ml))\n\n if plot:\n\n rms1 = rms.copy() # Only symmetrize good bins\n rms1[goodbins] = symmetrize_velfield(xbin[goodbins], ybin[goodbins], rms[goodbins])\n\n if (vmin is None) or (vmax is None):\n vmin, vmax = stats.scoreatpercentile(rms1[goodbins], [0.5, 99.5]) # Could use np.percentile in Numpy 1.10\n\n plt.clf()\n plt.subplot(121)\n plot_velfield(xbin, ybin, rms1, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.title(r\"Input $V_{\\rm rms}$\")\n\n plt.subplot(122)\n plot_velfield(xbin, ybin, rmsModel, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.plot(xbin[~goodbins], ybin[~goodbins], 'ok', mec='white')\n plt.title(r\"Model $V_{\\rm rms}$\")\n plt.tick_params(labelleft='off')\n plt.subplots_adjust(wspace=0.03)\n\n return rmsModel, ml, chi2, flux",
"def powder_XRD(crystal,wavelength, get_mults=False):\n \n # The wavenumber of the input wavelength\n nu = 2*n.pi/wavelength\n\n # Make a list of the accessible rlvs\n rlvs = find_accessible_rlvs(crystal,wavelength)\n \n # Now we calculate the scattering intensity from each rlv\n intensities = {\n tuple(rlv): n.abs(crystal.structure_factor(rlv))**2\n for rlv in rlvs}\n \n # Now sum up all rlvs with the same magnitude. We also\n # get rid of all the scattering vectors with 0 intensity\n magnitudes = {}\n multiplicities = {}\n for rlv, intensity in intensities.items():\n repeat = False\n mag = n.linalg.norm(rlv)\n for oldmag in magnitudes:\n if n.isclose(mag,oldmag):\n magnitudes[oldmag] += intensity\n multiplicities[oldmag] += 1\n repeat = True\n break\n if not repeat and not n.isclose(mag,0):\n multiplicities[mag] = 1\n magnitudes[mag] = intensity\n \n # Now we reformat the multiplicity data in a nice way\n multiplicities = {2 * n.arcsin(mag / (2 * nu)) * 180 / n.pi:\n multiplicity\n for mag, multiplicity in multiplicities.items()\n if not n.allclose(magnitudes[mag],0)}\n\n # And now we calculate the scattering intensities\n # (a.u. per steradian) as a function of scattering angle\n intensities = {2 * n.arcsin(mag / (2 * nu)) * 180 / n.pi:\n intensity * \n # This factor corrects for the fact that the same total\n # power in the debye scherrer rings is more\n # concentrated when 2\\theta is near 0 or 2pi\n 1 / n.sin(2*n.arcsin(mag/(2*nu))) *\n # This factor corrects for the probability that any\n # given crystal domain will scatter into the rlv\n 1 / mag *\n # This factor corrects for polarization effects,\n # Assuming an unpolarized input beam and no polarization\n # analysis\n (1 + n.cos(2*n.arcsin(mag/(2*nu)))**2)/2\n for mag, intensity in magnitudes.items()\n if not n.allclose(intensity,0)}\n if get_mults:\n return intensities, multiplicities\n else:\n return intensities",
"def compute_resolution(zoom, size_px):\n # Calibration data:\n dist_in_um = 10\n dist_in_px = np.array([21.13, 19.62, 8.93])\n zooms = np.array([1.5, 3, 4.5])\n image_max_sizes = np.array([330, 610, 410])\n \n return np.mean((dist_in_um/dist_in_px) * (zoom/zooms) * (image_max_sizes/size_px))",
"def Hosek18b(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave"
] | [
"0.63555455",
"0.6032541",
"0.5720245",
"0.56559825",
"0.55820227",
"0.5571117",
"0.55641395",
"0.5554358",
"0.55440706",
"0.5535525",
"0.54904956",
"0.5435876",
"0.5406285",
"0.5405981",
"0.5359477",
"0.53531414",
"0.5340021",
"0.53357816",
"0.53222436",
"0.53126466",
"0.5305457",
"0.5287403",
"0.52856266",
"0.5281704",
"0.5274005",
"0.5266809",
"0.5258586",
"0.52553356",
"0.52535915",
"0.5251993",
"0.52474725",
"0.5242623",
"0.52410406",
"0.5223055",
"0.5207767",
"0.51938456",
"0.5192101",
"0.5190488",
"0.51888967",
"0.5188061",
"0.5186963",
"0.5184558",
"0.51841",
"0.5178314",
"0.51759726",
"0.5164357",
"0.51532453",
"0.51445305",
"0.51407355",
"0.513964",
"0.5137735",
"0.51328754",
"0.5129911",
"0.51277786",
"0.5116018",
"0.5112098",
"0.5110538",
"0.51073444",
"0.5097613",
"0.5089206",
"0.50861377",
"0.5079817",
"0.50779307",
"0.50778955",
"0.50689375",
"0.50562793",
"0.50522023",
"0.5051829",
"0.5050385",
"0.5046136",
"0.50444055",
"0.5042501",
"0.5040303",
"0.50379354",
"0.50324833",
"0.502235",
"0.50215006",
"0.50178504",
"0.5013273",
"0.5013164",
"0.50021964",
"0.5000256",
"0.49917543",
"0.49897954",
"0.49892005",
"0.49844053",
"0.49822775",
"0.49806818",
"0.4978356",
"0.49770987",
"0.4966177",
"0.49590266",
"0.49538293",
"0.49520215",
"0.49497202",
"0.4948259",
"0.4948237",
"0.4937661",
"0.49368182",
"0.49345338"
] | 0.71937263 | 0 |
Returns a Dictionary for the triangular numbers associated with the Zernike pyramid | def triangular_numbers(N_levels):
zernike_rows = list(np.arange(1, N_levels + 1))
triangular = {}
for i, zernike_per_row in enumerate(zernike_rows):
total = np.sum(zernike_rows[:i+1])
triangular[zernike_per_row] = total
return triangular | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_tri_dict(self):\n tri_dict = dict(\n vertices=np.concatenate([self.contour.vertices] + [hole.vertices for hole in self.holes]),\n segments=list(self._segment_pairs())\n )\n if self.holes:\n tri_dict['holes'] = np.array([hole.interior_point for hole in self.holes])\n return tri_dict",
"def create_triad_counts():\n triads = [str(i) + str(j) + str(k) for i in range(2) for j in range(2) for k in range(2)]\n triad_counts = {}\n\n for triad in triads:\n triad_counts[triad] = [0, 0]\n\n return triad_counts",
"def _make_limb_dict():\n\n return {'left_arm_y': 10, 'right_arm_y': 13,\n 'left_arm_z': 11, 'right_arm_z': 14,\n 'left_leg_y': 4, 'right_leg_y': 7,\n 'left_leg_z': 5, 'right_leg_z': 8,\n 'hip_y': 2, 'hip_x': 1}",
"def pascal_segmentation_lut():\n\n classes_lut = dict([(0, 'background'), (255, 'teeth')])\n\n return classes_lut",
"def template(self) -> Dict[Union[int, str], int]:\r\n if not hasattr(self, '_template'):\r\n index = 0\r\n self._template = {}\r\n n = sum(self.sequence)\r\n for i, step in enumerate(self):\r\n max = index + step - 1\r\n max_next = max + self.sequence[i + 1] if i < len(self.sequence) - 1 else 0\r\n for j in range(index, index + step):\r\n if j < max:\r\n self._template[j] = j + 1\r\n if j + step < n and j + step <= max_next:\r\n self._template[str(j)] = j + step\r\n index += step\r\n return self._template",
"def create_dictionary():\n d = {}\n for y in range(HEIGHT):\n if (y % 2) != 0:\n pos = (10*y)+10\n else:\n pos =((10*y)-9)+10 \n for x in range(WIDTH):\n xy_tuple = (x,y)\n d[pos] = xy_tuple\n if (y % 2) != 0:\n pos = pos - 1\n else:\n pos = pos + 1\n \n return d",
"def make_pt_2_neighbors(tri):\n pt_dict=dict()\n for vlist in tri.vertices:\n for i in vlist:\n if not i in pt_dict:\n pt_dict[i]=list()\n for k in vlist:\n if k != i:\n pt_dict[i].insert(0,k)\n for i in range(tri.points.shape[0]):\n pt_dict[i]=np.unique(pt_dict[i]).tolist()\n return pt_dict",
"def test_get_triangle_dict_all_int(self):\n triangle = {'a': 1, 'b': 2, 'c': 3}\n result = get_triangle_type(triangle)\n self.assertEqual(result, 'scalene')",
"def dimension_homology_sc(self):\r\n vec_dic = {}\r\n for k in range(self.dimension()+1):\r\n p = k \r\n A = self.matrix_simmetric_representate(p)\r\n dn = 0\r\n dc = 0\r\n if (p == 0):\r\n dn = A.shape[1]\r\n if (p > 0 and (p <= self.dimension())):\r\n null = null_space(A)\r\n if (null.size != 0):\r\n dn = len(null[0])\r\n if (all(elem == 0 for elem in null[0])):\r\n dn = 0 \r\n p = k + 1\r\n if (p>0 and (p <= self.dimension())):\r\n A1=self.matrix_simmetric_representate(p)\r\n col = orth(A1)\r\n if (col.size != 0):\r\n dc = len(col[0])\r\n else: \r\n dc = 0\r\n vec_dic[k] = dn - dc\r\n return vec_dic",
"def get_all_potential_edges(self) -> Dict[str,\n Tuple[int, int, int, int]]:\n orig_rows = self.tile_rows\n\n ret = dict()\n\n for i in range(0, 4):\n self.rotate_right(i)\n for j in range(0, 2):\n self.flip_l_r(j)\n for k in range(0, 2):\n self.flip_t_b(k)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'rr{i}_lr{j}_tb{k}'] = edges\n\n self.tile_rows = orig_rows\n\n for j in range(0, 2):\n self.flip_l_r(j)\n for i in range(0, 4):\n self.rotate_right(i)\n for k in range(0, 2):\n self.flip_t_b(k)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'lr{j}_rr{i}_tb{k}'] = edges\n\n self.tile_rows = orig_rows\n\n for j in range(0, 2):\n self.flip_l_r(j)\n for k in range(0, 2):\n self.flip_t_b(k)\n for i in range(0, 4):\n self.rotate_right(i)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'lr{j}_tb{k}_rr{i}'] = edges\n\n self.tile_rows = orig_rows\n\n for k in range(0, 2):\n self.flip_t_b(k)\n for j in range(0, 2):\n self.flip_l_r(j)\n for i in range(0, 4):\n self.rotate_right(i)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'tb{k}_lr{j}_rr{i}'] = edges\n\n self.tile_rows = orig_rows\n\n for k in range(0, 2):\n self.flip_t_b(k)\n for i in range(0, 4):\n self.rotate_right(i)\n for j in range(0, 2):\n self.flip_l_r(j)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'tb{k}_rr{i}_lr{j}'] = edges\n\n self.tile_rows = orig_rows\n\n for i in range(0, 4):\n self.rotate_right(i)\n for k in range(0, 2):\n self.flip_t_b(k)\n for j in range(0, 2):\n self.flip_l_r(j)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'rr{i}_tb{k}_lr{j}'] = edges\n\n self.tile_rows = orig_rows\n\n return ret",
"def trios(self):\n return self._trios",
"def tripTrian(G):\r\n\tn = len(G)\r\n\ttrip = set()\r\n\ttrian = set()\r\n\tfor u in range(n):\r\n\t\tfor v in G[u]:\r\n\t\t\tfor w in G[v]:\r\n\t\t\t\tif v !=u and v!=w and u != w:\r\n\t\t\t\t\ttrip.add((u,v,w))\r\n\t\t\t\t\ts = getTrip((u,v,w))\r\n\t\t\t\t\tif u in G[w]:\r\n\t\t\t\t\t\ttrip.add((u,v,w))\r\n\t\t\t\t\t\ttrian.update(s)\r\n\t\t\t\t\t\ttrip.update(s)\r\n\t\t\t\t\telse:\r\n\t\t\t\t\t\ttrip.update(s)\r\n\r\n\treturn (len(trian)/6,len(trip)/6)",
"def wc_matrix(matrix):\n return [{\"A\": position[\"T\"], \"T\": position[\"A\"], \"C\": position[\"G\"], \"G\": position[\"C\"]} for position in matrix[::-1]]",
"def creating_dict(i, states):\n # base case\n if i == 5:\n # no more edges - recursion ends here\n return {'barcode': []}\n\n # iterative case\n else:\n # this is a tree structure where the node contains timepoint information and barcode information\n # and three edges link to other nodes that represent lineages in three differnet states\n updated_dict = {'t{}'.format(i): {state: creating_dict(i + 1, states) for state in states}}\n updated_dict['t{}'.format(i)].update({'barcode': []})\n return updated_dict",
"def grid_vals(grid):\n\tletters = list(grid)\n\t#print \"---------------------------------\\n-------------------\"\n\t#print letters\n\t#print \"----------------------------------\\n-------------------\"\n\tassert len(letters) == 81\n\ttempdict = zip(squares, letters)\n\treturn dict(tempdict)",
"def create_dictionary_indexes(self):\n direction_dictionary = {}\n direction_dictionary[UP] = self.direction_list(UP)\n direction_dictionary[DOWN] = self.direction_list(DOWN)\n direction_dictionary[LEFT] = self.direction_list(LEFT)\n direction_dictionary[RIGHT] = self.direction_list(RIGHT)\n return direction_dictionary",
"def second_round_output(self, ram_dict):\n\t\tresult = {}\n\t\tfor key in ram_dict:\n\t\t\tresult[key] = [len(ram_dict[key]), ram_dict[key]]\n\t\treturn result",
"def _get_tri_edges(tri):\n return [[tri[1], tri[2]], [tri[2], tri[0]], [tri[0], tri[1]]]",
"def getFi():\n fi = {}\n for i in range(4):\n for k in range(1,9):\n arg = i+1+(4*(k-1))\n val = (8*i)+k\n if arg <= 32 :\n fi[arg]=val\n return fi",
"def _upward_triangle_indicies(height=3):\n return [(height-r,c) for r in range(height) for c in range(-abs(r),abs(r)+1)]",
"def get_triangle_numbers(n):\n r = []\n for i in xrange(1, n + 1):\n t = ((i * (i + 1)) / 2)\n r.append(t)\n return r",
"def DictFunction2():\r\n print \"Create Second Dictionary\"\r\n NumberDict = dict(zip((i for i in range(16)), (hex(i) for i in range(16))))\r\n print NumberDict",
"def zernike_visuo__pyramid(zbasis, n, m, nlevels, figsize=(12, 12), cmap='jet', fontsize=20, colorbar_labelsize=10):\n \n cmap = plt.get_cmap('%s' %cmap)\n \n index = 0\n if not (nlevels>=0):\n print('Input parameter must be >= 0')\n raise AssertionError() \n \n axlist = []\n if (nlevels == 0):\n \n fig = plt.figure(num = 1, figsize=figsize)\n ax = fig.add_subplot(1,1,1)\n axlist.append(ax)\n im = ax.imshow(zbasis, cmap=cmap, interpolation='lanczos')\n ax.set_title(r'$Z_{%d}^{%d}$' %(n,m), fontsize=fontsize)\n\tax.axis('off')\n\n \n else:\n \n # ++++ Defining layout for row number n and colunmn number m ++++++++\n \n fig = plt.figure(1, figsize=figsize)\n row_n = nlevels + 1\n col_m = 2*nlevels + 1\n\n top = (col_m + 1)/2\n leftside = row_n*col_m - col_m + 1\n rightside = row_n*col_m \n\n k1 = 0; k2 = 0\n \n\n for i in xrange(top,row_n*col_m+1, 2*col_m):\n\n ax = fig.add_subplot(row_n,col_m,i)\n axlist.append(ax)\n im=ax.imshow(zbasis[index], cmap=cmap, interpolation='lanczos', alpha=None)\n ax.set_title(r'$Z_{%d}^{%d}$' %(n[index],m[index]), fontsize=fontsize)\n ax.axis('off')\n index += 1\n s1 = i + col_m + 1\n s2 = i + col_m - 1 \n jj1 = k1\n jj2 = k2\n\n\n while (s2 <= leftside): \n\n ax = fig.add_subplot(row_n,col_m,s2)\n axlist.append(ax)\n im=ax.imshow(zbasis[index], cmap=cmap, interpolation='lanczos')\n ax.set_title(r'$Z_{%d}^{%d}$' %(n[index],m[index]), fontsize=fontsize)\n ax.axis('off')\n index += 1\n s2 +=col_m - 1\n jj1 += 1\n jj2 -= 1\n\n leftside +=2\n\n jj1 = k1\n jj2 = k2\n\n while (s1 <= rightside):\n \n ax = fig.add_subplot(row_n,col_m,s1)\n axlist.append(ax)\n im=ax.imshow(zbasis[index], cmap=cmap, interpolation='lanczos')\n ax.set_title(r'$Z_{%d}^{%d}$' %(n[index],m[index]), fontsize=fontsize)\n ax.axis('off')\n index += 1\n s1 +=col_m + 1\n jj1 += 1\n jj2 += 1\n\n rightside -=2\n k1 = 0; k2 += 2\n\n\n cbar = fig.colorbar(im, ax=axlist,fraction=0.05, orientation='horizontal') \n cbar.ax.tick_params(labelsize=colorbar_labelsize)\n fig.subplots_adjust(wspace=0,hspace=0, right=0.72, bottom=0.2)\n fig.savefig('zernike_orders.png', dpi=300)\n\n return None",
"def tris(self):\n return self.nlegomena(3)",
"def merchandise(t):\n d = {}\n for wagon in t:\n d[wagon[0]] = d.get(wagon[0],0) + wagon[1]\n return d",
"def _pettifor_numbers():\n return { \"Li\": 0.45,\n \"Be\": 1.5,\n \"B\": 2.0,\n \"C\": 2.5,\n \"N\": 3.0, \n \"O\": 3.5,\n \"F\": 4.0,\n \n \"Na\": 0.4,\n \"Mg\": 1.28,\n \"Al\": 1.66,\n \"Si\": 1.92,\n \"P\": 2.18,\n \"S\": 2.44,\n \"Cl\": 2.70,\n \n \"K\": 0.35,\n \"Ca\": 0.60,\n \"Sc\": 0.74,\n \"Ti\": 0.79,\n \"V\": 0.84,\n \"Cr\": 0.89,\n \"Mn\": 0.94,\n \"Fe\": 0.99,\n \"Co\": 1.04,\n \"Ni\": 1.09,\n \"Cu\": 1.20,\n \"Zn\": 1.44,\n \"Ga\": 1.68,\n \"Ge\": 1.92,\n \"As\": 2.16,\n \"Se\": 2.40,\n \"Br\": 2.64,\n\n \"Rb\": 0.30,\n \"Sr\": 0.55,\n \"Y\": 0.70,\n \"Zr\": 0.76,\n \"Nb\": 0.82,\n \"Mo\": 0.88,\n \"Tc\": 0.94,\n \"Ru\": 1.00,\n \"Rh\": 1.06,\n \"Pd\": 1.12,\n \"Ag\": 1.18,\n \"Cd\": 1.36,\n \"In\": 1.60,\n \"Sn\": 1.84,\n \"Sb\": 2.08,\n \"Te\": 2.32,\n \"I\": 2.56,\n \n \"Cs\": 0.25,\n \"Ba\": 0.50,\n \"La\": 0.748,\n \"Hf\": 0.775,\n \"Ta\": 0.83,\n \"W\": 0.885,\n \"Re\": 0.94,\n \"Os\": 0.995,\n \"Ir\": 1.05,\n \"Pt\": 1.105,\n \"Au\": 1.16,\n \"Hg\": 1.32,\n \"Tl\": 1.56,\n \"Pb\": 1.80,\n \"Bi\": 2.04,\n \"Po\": 2.28, \n \"At\": 2.52 }",
"def print_triangular_numbers(n):\r\n\r\n\tfor i in range(1, n+1):\r\n\t\tsum = int((i / 2)*(1 + i))\r\n\t\tprint(i, \"\\t\", sum)",
"def gen_triangle_level(self, i):\r\n if i == 1:\r\n return list([1])\r\n\r\n # Select previous level\r\n t = self.gen_triangle_level(i - 1)\r\n m = len(t) + 1\r\n\r\n return [1\r\n if (j == 0 or\r\n j == m - 1)\r\n else t[j - 1] + t[j]\r\n for j\r\n in range(0, m, 1)]",
"def get_triangles_per_surface(my_core, entity_ranges):\n\n t_p_s = {}\n for surface in entity_ranges['Surfaces']:\n t_p_s[surface] = my_core.get_entities_by_type(\n surface, types.MBTRI).size()\n return t_p_s",
"def grid_values(self, grid):\n chars = [col for col in grid if col in self.digits or col in '0.']\n assert len(chars) == 81\n return dict(zip(self.squares, chars))",
"def tri(self, dico):\n return sorted(dico.keys(), key=str)",
"def make_rings(self):\n _rings = {}\n _r = 0\n _indices = self.indices\n for i, polar_angle in enumerate(self.two_thetas):\n if i == 0:\n _rings[0] = [polar_angle, [self.indices_hkl(*_indices[i])]]\n elif polar_angle-_rings[_r][0] > self.polar_tolerance:\n _r += 1\n _rings[_r] = [polar_angle, [self.indices_hkl(*_indices[i])]]\n else:\n _rings[_r][1].append(self.indices_hkl(*_indices[i]))\n pa = wa = 0.0\n for j, hkl in enumerate(_rings[_r][1]):\n pa += self.two_theta_hkl(*hkl[0]) * len(hkl)\n wa += len(hkl)\n _rings[_r][0] = pa / wa\n return _rings",
"def triples():",
"def compute_map(self):\n number_of_orders = 0\n orders = []\n for i, line in enumerate(self.__grid):\n for j, column in enumerate(line):\n if self.__grid[i][j][\"humans\"] != 0:\n number_of_orders += 1\n orders.append(i)\n orders.append(j)\n orders.append(self.__grid[i][j][\"humans\"])\n orders.append(0)\n orders.append(0)\n if self.__grid[i][j][\"vampires\"] != 0:\n number_of_orders += 1\n orders.append(i)\n orders.append(j)\n orders.append(0)\n orders.append(self.__grid[i][j][\"vampires\"])\n orders.append(0)\n if self.__grid[i][j][\"werewolves\"] != 0:\n number_of_orders += 1\n orders.append(i)\n orders.append(j)\n orders.append(0)\n orders.append(0)\n orders.append(self.__grid[i][j][\"werewolves\"])\n return number_of_orders, orders",
"def get_k2(tri, v_neighbours):\n three = np.array([0, 1, 2])\n nv = tri.shape[0]\n k2s = np.empty((nv, 3), dtype=np.int32)\n for i in range(nv):\n for k in range(3):\n neighbour = v_neighbours[i, k]\n k2 = ((v_neighbours[neighbour] == i) * three).sum()\n k2s[i, k] = k2\n return k2s",
"def _get_unique_rb_torsion_types(structure, epsilon_conversion_factor):\n unique_dihedral_set = set()\n for dihedral in structure.rb_torsions:\n unique_dihedral_set.add(\n _get_dihedral_rb_torsion_key(dihedral, epsilon_conversion_factor)\n )\n\n dihed_key_dict = {\n dihed_key: i + 1 for i, dihed_key in enumerate(unique_dihedral_set)\n }\n\n return dihed_key_dict",
"def lines():\n line_dict = {}\n #\n line_dict['ArI'] = 2**0\n line_dict['HgI'] = 2**1\n line_dict['KrI'] = 2**2\n line_dict['NeI'] = 2**3\n line_dict['XeI'] = 2**4\n line_dict['CdI'] = 2**5\n line_dict['ZnI'] = 2**6\n line_dict['HeI'] = 2**7\n line_dict['OH_R24000'] = 2**8\n line_dict['OH_triplespec'] = 2**9\n line_dict['CuI'] = 2**10\n line_dict['ArII'] = 2**11\n line_dict['OH_XSHOOTER'] = 2**12\n line_dict['OH_GNIRS'] = 2**13\n line_dict['OH_NIRES'] = 2**14\n line_dict['ThAr_XSHOOTER_VIS'] = 2**15\n line_dict['OH_GMOS'] = 2**16\n line_dict['OH_MODS'] = 2**17\n line_dict['ThAr_MagE'] = 2**18 # R=4100\n line_dict['OH_FIRE_Echelle'] = 2**19 # R=6000\n line_dict['Ar_IR_GNIRS'] = 2**20 # R=6000\n line_dict['FeI'] = 2**21\n line_dict['FeII'] = 2**22\n line_dict['UNKNWN'] = 2**23\n line_dict['Ar_IR_MOSFIRE'] = 2 ** 24\n line_dict['Ne_IR_MOSFIRE'] = 2 ** 25\n line_dict['OH_MOSFIRE_Y'] = 2 ** 26\n line_dict['OH_MOSFIRE_J'] = 2 ** 27\n line_dict['OH_MOSFIRE_H'] = 2 ** 28\n line_dict['OH_MOSFIRE_K'] = 2 ** 29\n line_dict['ThAr_XSHOOTER_UVB'] = 2**30\n #\n return line_dict",
"def get_template_stech_dict(template, seq_dict, verbose=False):\n template_stech_dict = {} # Format: { \"A\": 2, \"B\": 3, ...}, where key is chain id and value\n # is the number of repetitions\n parser = PDBParser(PERMISSIVE=1, QUIET=True)\n template_object = parser.get_structure(\"template\", template)[0] # Generates pdb template object\n for chain in template_object:\n chain = CustomChain(chain) # Transforms pdb chain object to CustomChain instance\n chain.parent = None # Removes previous parent to evade biopython errors of id repetitions\n chain_seq = chain.get_sequence()\n if chain_seq in seq_dict:\n chain.id = seq_dict[chain_seq] # Updates the template chain id to the corresponding by its sequence\n template_stech_dict.setdefault(chain.id, 0)\n template_stech_dict[chain.id] += 1 # Adds to chain id counter\n if verbose: # Transforms the stech_dict to a string to be printed\n stechometry_string = \"\"\n for key in sorted(template_stech_dict.keys()):\n stechometry_string += key+\":\"+str(template_stech_dict[key])+\",\"\n stechometry_string = stechometry_string[:-1]\n print(\"Template's Stoichiometry is: \"+stechometry_string)\n return template_stech_dict",
"def ztrajs(self):\n Z = np.array(self.ztraj_).astype(np.float32)\n return [Z[:, i, :] for i in range(Z.shape[1])]",
"def BL2TRI(BL, xy):\n d = {}\n # preallocate for speed\n tri = np.zeros((len(BL), 3), dtype=np.int)\n # c is dmy index to fill up and cut off tri\n c = 0\n for i in BL:\n # reorder row if [big, small]\n if i[0] > i[1]:\n t = i[0]\n i[0] = i[1]\n i[1] = t\n # Check if small val in row is key of dict d.\n # If not, then initialize the key, value pair.\n if (i[0] in d):\n d[i[0]].append(i[1])\n else:\n d[i[0]] = [i[1]]\n\n # From dict d, make TRI\n for key in d:\n for n in d[key]:\n for n2 in d[key]:\n if (n > n2) or n not in d:\n continue\n if n2 in d[n]:\n tri[c, :] = [key, n, n2]\n c += 1\n tri = tri[0:c]\n\n # Check for points inside each triangle. If they exist, remove that triangle\n keep = np.ones(len(tri), dtype=bool)\n index = 0\n for row in tri:\n mask = np.ones(len(xy), dtype=bool)\n mask[row] = False\n remove = where_points_in_triangle(xy[mask, :], xy[row[0], :], xy[row[1], :], xy[row[2], :])\n if remove.any():\n keep[index] = False\n # if check:\n # plt.triplot(xy[:,0],xy[:,1], tri, 'g.-')\n # plt.plot(xy[row,0], xy[row,1],'ro')\n # plt.show()\n\n index += 1\n\n TRI = tri[keep]\n\n return TRI",
"def nk_table(self):\n return self.map(\"keys\", \"values\")",
"def get_k2_boundary(tri, v_neighbours):\n three = np.array([0, 1, 2])\n nv = tri.shape[0]\n k2s = np.empty((nv, 3), dtype=np.int32)\n for i in range(nv):\n for k in range(3):\n neighbour = v_neighbours[i, k]\n if neighbour == -1:\n k2s[i,k] = -1\n else:\n k2 = ((v_neighbours[neighbour] == i) * three).sum()\n k2s[i, k] = k2\n return k2s",
"def twistedLadder(self,n):\n return {i:((i+1)%n,(i-1)%n,(i+n//2)%n) for i in range(n)}",
"def footprint_corner_indices():",
"def _triangulate(self,x):\n\n t = tr.triangulate({\"vertices\": x},\"-n\")\n tri = t[\"triangles\"]\n neighbours = t[\"neighbors\"]\n\n b_cells = np.zeros(self.n_c)\n b_cells[self.n_C:] = 1\n\n three_b_cell_mask = b_cells[tri].sum(axis=1)==3\n tri = tri[~three_b_cell_mask]\n\n neigh_map = np.cumsum(~three_b_cell_mask)-1\n neigh_map[three_b_cell_mask] = -1\n neigh_map = np.concatenate((neigh_map,[-1]))\n\n neighbours = neighbours[~three_b_cell_mask]\n neighbours = neigh_map[neighbours]\n\n #6. Store outputs\n self.tris = tri\n self.n_v = tri.shape[0]\n self.Cents = x[self.tris]\n self.vs = self.get_vertex()\n\n\n #7. Manually calculate the neighbours. See doc_string for conventions.\n self.v_neighbours = neighbours\n self.neighbours = self.vs[neighbours]\n self.neighbours[neighbours == -1] = np.nan\n\n self.reset_k2s()",
"def get_tri_list(top_tri):\n\ttri_nums = [1]\n\tval = 1\n\twhile tri_nums[-1] < top_tri:\n\t\ttri_val = int(.5*(val*(val+1)))\n\t\ttri_nums.append(tri_val)\n\t\tval += 1\n\treturn tri_nums",
"def get_triangles( self, N ):\n\n # store N as an instance variable\n self.N = N\n\n # initialize array to store locations of points for all triangles in the\n # tessellation sequence\n self.triangles = np.zeros( ( self.N, 3, 2 ) )\n\n # define points of the first triangle in the tessellation sequence\n point_c = np.array( [ 0, 0 ] )\n point_b = self.a * np.array( [ np.cos( self.C ), np.sin( self.C ) ] )\n point_a = np.array( [ self.b, 0 ] )\n\n # stack the points into a single array of shape (3, 2 )\n triangle = np.vstack( [ point_c, point_b, point_a ] )\n\n # loop over the number of triangles in the sequence\n for i in range( self.N ):\n\n # store the points of the i-th triangle in the array\n self.triangles[ i ] = triangle\n\n # compute the next triangle in the tessellation sequence\n triangle = self.next_triangle( triangle = triangle )\n\n # shift the next triangle in the tessellation sequence such that its\n # point C is in the same location as point B of the previous triangle\n triangle += ( self.triangles[ i - 1, 1 ] - self.triangles[ 0, 0 ] )",
"def rectangledict(rectangles):\n return {rectangle.n: i for i, rectangle in enumerate(rectangles)}",
"def getCrowDistDict(self):\n retDict = Distribution.getCrowDistDict(self)\n retDict['n'] = self.n\n retDict['p'] = self.p\n return retDict",
"def zernIndex(j):\r\n n = int((-1.+np.sqrt(8*(j-1)+1))/2.)\r\n p = (j-(n*(n+1))/2.)\r\n k = n%2\r\n m = int((p+k)/2.)*2 - k\r\n\r\n if m!=0:\r\n if j%2==0:\r\n s=1\r\n else:\r\n s=-1\r\n m *= s\r\n\r\n return [n, m]",
"def get_dict_of_int2(self):\n pass",
"def draggableCircuitResults(self):\n returnedDictionary={}\n self.blochSpheres=self.separatedBlochSpheres()\n returnedDictionary[\"probabilities\"] = self.separatedProbabilities()\n #returnedDictionary[\"blochSpheres\"] = self.separatedBlochSpheres()\n returnedDictionary[\"diracNotation\"] = self.diracNotation()\n returnedDictionary[\"link\"] = \"\"\n returnedDictionary['chart'] = self.graph()\n try:\n returnedDictionary[\"qasm\"] = self.circuit.qasm()\n except Exception:\n #str(Exception)\n returnedDictionary[\"qasm\"] = \"//You are using custom gate\\n//with size more than 2 qubits\\n//sorry, this version doesn't support that\\n//qiskit version 0.19.1\"\n \n if self.API_TOKEN != \"\":\n returnedDictionary[\"link\"] = self.runOnIBMQ()\n \n return returnedDictionary",
"def triangsamples(self):\n\n return self._triangsamples",
"def generateNeighborMap(self):\n A=[]\n for key,value in self._ts_dict.iteritems():\n A.append(np.array([i.replace(\"#\",\" \")\n .split()[0:4] for i in value.index])\n .astype(float))\n\n B=np.array(A[0]).reshape(len(A[0]),4)\n print (B[:,0]+B[:,1])/2\n A=[]\n for key,value in self._ts_dict.iteritems():\n A.append(value.sum(axis=1).values)\n print A",
"def nmer_dictionary(self,n,dic={}):\n if self.sequence == \"\":\n self.fetchSequence()\n self.sequence = self.sequence.upper()\n for i in range(0,len(self.sequence)-n):\n subseq = self.sequence[i:][:n]\n dic[subseq]=1+dic.get(subseq,0)\n del subseq\n return dic",
"def __create_ttable(self):\n # Currently assume equiprobable distribution\n # Indexed by previous interval and current interval\n ttable = {}\n\n len_ = len(self.scale)\n\n kv = []\n for pitch in PitchRange:\n for interval in IntervalRange:\n kv.append(((interval, pitch), self.__create_trans(interval,pitch)))\n\n ttable = dict(kv)\n #pdb.set_trace()\n self.ttable = ttable",
"def degree_index_dict_no_red(self):\n did = dict()\n for i,c in enumerate(self.classes):\n if isinstance(c, reducible_boundary) or isinstance(c, psi_class) or c == 0:\n continue \n try:\n degree = c.degree\n except AttributeError:\n degree = 1\n if not did.has_key(degree):\n did[degree] = []\n did[degree].append(i+1)\n return did",
"def __init__(self,n):\n\t\tself._dictOut={}\n\t\tself._dictIn = {}\n\t\tfor i in range(n):\n\t\t\tself._dictOut[i]=[]\n\t\t\tself._dictIn[i] = []",
"def itos(self):\n return {integer: string for string, integer in self.stoi.items()}",
"def histogramintegrals(self):\n return {}",
"def abgrenzung(self):\n return dict([('x', self.x), ('y', self.y),\n ('hoehe', self.hoehe), ('breite', self.breite)])",
"def zoisite():\n\n rho = 3343.\n\n C = np.zeros((6,6), dtype=float)\n C[0,0] = 279.8; C[0,1] = 94.7; C[0,2] = 88.7; C[0,3] = 0.; C[0,4] = 0.; C[0,5] = 0.\n C[1,0] = C[0,1]; C[1,1] = 249.2; C[1,2] = 27.5; C[1,3] = 0.; C[1,4] = 0.; C[1,5] = 0.\n C[2,0] = C[0,2]; C[2,1] = C[1,2]; C[2,2] = 209.4; C[2,3] = 0.; C[2,4] = 0.; C[2,5] = 0.\n C[3,0] = C[0,3]; C[3,1] = C[1,3]; C[3,2] = C[2,3]; C[3,3] = 51.8; C[3,4] = 0.; C[3,5] = 0.\n C[4,0] = C[0,4]; C[4,1] = C[1,4]; C[4,2] = C[2,4]; C[4,3] = C[3,4]; C[4,4] = 81.4; C[4,5] = 0.\n C[5,0] = C[0,5]; C[5,1] = C[1,5]; C[5,2] = C[2,5]; C[5,3] = C[3,5]; C[5,4] = C[4,5]; C[5,5] = 66.3\n\n return C, rho",
"def getQuadrilaterals(self):\n pass",
"def makeIndexMap(self):\n\t\tn = self.numRects\n\t\thalfList = [[(j,n-1-i+j) for j in range(i+1)] for i in range(n)]\n\t\tfullList = halfList + [[(j[1],j[0]) for j in i] for i in halfList[n-2::-1]]\n\t\treturn fullList",
"def triangle(self):\n [r,c] = self.D\n m = min(r,c)\n S = self\n T = zeros(r,c)\n while m > 0:\n NoLigne = 0\n while S[NoLigne, 0] == 0 and (NoLigne < m - 1):\n NoLigne += 1\n S = S.swap(NoLigne,0)\n if S[0, 0] != 0:\n pivot = S[0,0]\n for k in range(1,m):\n if S[k,0] != 0:\n S = S.comb_lignes(pivot, -S[k,0],k,0)\n #print(\"pivot = \"+str(pivot))\n #print(\"S dans for :\")\n #print(S)\n T = T.remplace_ligned(r - m,S.F)\n #print(\"Évolution de T :\")\n #print(T)\n S = S.decoupe()\n m -= 1\n return T",
"def number_of_routes(max_i, max_j):\n routes = {}\n\n for i in range(1, max_i + 1):\n routes[(i, 0)] = 1\n for j in range(1, max_j + 1):\n routes[(0, j)] = 1\n\n for i in range(1, max_i + 1):\n for j in range(1, max_j + 1):\n routes[(i, j)] = routes[(i - 1, j)] + routes[(i, j - 1)]\n\n return routes[(max_i, max_j)]",
"def grid(self) -> dict:\n raise NotImplementedError",
"def __init__(self, n):\n self._dictOut = {}\n self._dictIn = {}\n for i in range(n):\n self._dictOut[i] = []\n self._dictIn[i] = []",
"def zernike_Double_Index(nlevels):\n \n\t \n if not (nlevels>=0):\n print('Input parameter nlevels must be >= 0')\n raise AssertionError()\n \n if (nlevels == 0):\n \n m = 0\n n = 0\n \n return n, m\n \n else:\n \n # ++++ Defining layout for row number n and colunmn number m ++++++++\n\n row_n = nlevels+1\n col_m = 2*nlevels +1\n x = np.arange(row_n)\n y = np.arange(-(col_m-1)//2, (col_m+1)//2,1)\n Q = [(i,j) for i in x for j in y]\n #\n\n\n nm_index = []\n \n top = (col_m + 1)/2\n leftside = row_n*col_m - col_m + 1\n rightside = row_n*col_m \n\n k1 = 0; k2 = 0\n\n for i in xrange(top,row_n*col_m+1, 2*col_m):\n\n nm_index.append(Q[i-1])\n s1 = i + col_m + 1\n s2 = i + col_m - 1 \n jj1 = k1\n jj2 = k2\n\n\n while (s2 <= leftside): \n\n nm_index.append(Q[s2-1])\n s2 +=col_m - 1\n jj1 += 1\n jj2 -= 1\n\n leftside +=2\n\n jj1 = k1\n jj2 = k2\n\n while (s1 <= rightside): \n\n # \n nm_index.append(Q[s1-1])\n s1 +=col_m + 1\n jj1 += 1\n jj2 += 1\n\n rightside -=2\n k1 = 0; k2 += 2\n\n n = np.array(nm_index)[:,0]\n m = np.array(nm_index)[:,1]\n\n return n, m",
"def metro_phil_to_basis_dict(metro):\n for o in metro.objects:\n if o.is_scope:\n #one of the subkeys of the root object will be the detector phil. it will be the only one not extracted.\n detector_phil = o.extract()\n break\n #metro = metro.extract() # not needed\n\n bd = {(detector_phil.serial,): basis(matrix.col(detector_phil.orientation),\n matrix.col(detector_phil.translation)*1000) }\n for p in detector_phil.panel:\n bd[(detector_phil.serial,p.serial)] = basis(matrix.col(p.orientation),\n matrix.col(p.translation)*1000)\n for s in p.sensor:\n bd[(detector_phil.serial,p.serial,s.serial)] = basis(matrix.col(s.orientation),\n matrix.col(s.translation)*1000)\n for a in s.asic:\n bd[(detector_phil.serial,p.serial,s.serial,a.serial)] = basis(matrix.col(a.orientation),\n matrix.col(a.translation)*1000)\n\n return bd",
"def informacoes_ultima_nfce(self):\r\n info = {} \r\n for x in range(1,8):\r\n value = self.informacao_ultimo_nfce(x)\r\n info[value[0]] = value[1]\r\n return info",
"def getT9dict():\r\n T9dict = {}\r\n all_letters = string.lowercase\r\n T9dict.update(mapkeystoletter(2, all_letters[0:3]))\r\n T9dict.update(mapkeystoletter(3, all_letters[3:6]))\r\n T9dict.update(mapkeystoletter(4, all_letters[6:9]))\r\n T9dict.update(mapkeystoletter(5, all_letters[9:12]))\r\n T9dict.update(mapkeystoletter(6, all_letters[12:15]))\r\n T9dict.update(mapkeystoletter(7, all_letters[15:19]))\r\n T9dict.update(mapkeystoletter(8, all_letters[19:22]))\r\n T9dict.update(mapkeystoletter(9, all_letters[22:26]))\r\n T9dict[' '] = 0\r\n\r\n return T9dict",
"def pentakis(self):\n return self.nlegomena(5)",
"def cells_z(self):\n if self.is_depth:\n return list(reversed(self._cells[2]))\n return self._cells[2]",
"def zzx_to_dict(f):\n n, result = zzx_degree(f), {}\n\n for i in xrange(0, n+1):\n if f[n-i]:\n result[i] = f[n-i]\n\n return result",
"def getAllTriStimulus(self):\n return self.tristimulus",
"def yield_equilateral_triangles(cls):\n for i in range(1, 201):\n yield i-.5, i-.5, i-.5\n yield i, i, i",
"def six_hundred_cell(self):\n verts = []\n q12 = QQ(1)/2\n base = [q12,q12,q12,q12]\n for i in range(2):\n for j in range(2):\n for k in range(2):\n for l in range(2):\n verts.append([x for x in base])\n base[3] = base[3]*(-1)\n base[2] = base[2]*(-1)\n base[1] = base[1]*(-1)\n base[0] = base[0]*(-1)\n for x in permutations([0,0,0,1]):\n verts.append(x)\n for x in permutations([0,0,0,-1]):\n verts.append(x)\n g = QQ(1618033)/1000000 # Golden ratio approximation\n verts = verts + [i([q12,g/2,1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([q12,g/2,-1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([q12,-g/2,1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([q12,-g/2,-1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([-q12,g/2,1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([-q12,g/2,-1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([-q12,-g/2,1/(g*2),0]) for i in AlternatingGroup(4)]\n verts = verts + [i([-q12,-g/2,-1/(g*2),0]) for i in AlternatingGroup(4)]\n return Polyhedron(vertices = verts)",
"def triangle_numbers():\n counter, tri_number = 1, 1\n while True:\n yield tri_number\n counter += 1\n tri_number += counter",
"def get_dihedral_angles(self):\n mol = self.m\n c1 = mol.GetConformer(-1)\n torsma = '[!$(*#*)&!D1]~[!$(*#*)&!D1]'\n q = Chem.MolFromSmarts(torsma)\n matches = mol.GetSubstructMatches(q)\n nmat = len(matches)\n dic = {}\n for match in matches:\n j = match[0]\n k = match[1]\n bond = mol.GetBondBetweenAtoms(j, k)\n aj = mol.GetAtomWithIdx(j)\n ak = mol.GetAtomWithIdx(k)\n hj, hk = [ _hyb[_a.GetHybridization()] for _a in [aj,ak] ]\n iok1 = ( hj not in [2,3] )\n iok2 = ( hk not in [2,3] )\n if iok1 or iok2: continue\n for b1 in aj.GetBonds():\n if (b1.GetIdx() == bond.GetIdx()):\n continue\n i = b1.GetOtherAtomIdx(j)\n for b2 in ak.GetBonds():\n if (b2.GetIdx() == bond.GetIdx()) or (b2.GetIdx() == b1.GetIdx()):\n continue\n l = b2.GetOtherAtomIdx(k)\n # skip 3-membered rings\n if (l == i):\n continue\n _dang = rdMolTransforms.GetDihedralDeg(c1, i,j,k,l)\n dang = abs(_dang)\n assert dang <= 180.0\n ias4 = (i,j,k,l)\n if not self.wH:\n if np.any([ self.zs[iaa]==1 for iaa in ias4 ]):\n continue\n if self.key in ['z']:\n #print('atsi=',ias4, 'zsi=', [_zs[iaa] for iaa in ias4])\n zi,zj,zk,zl = [ self.zs[iaa] for iaa in ias4 ]\n if (zj==zk and zi>zl) or (zj>zk):\n ias4 = (l,k,j,i)\n #torsions.append(ias4)\n #_zi,_zj,_zk,_zl = [ zs[_] for _ in ias4 ]\n #typez = '%d-%d-%d-%d'%(_zi,_zj,_zk,_zl)\n type4 = tuple([self.zs[iaa] for iaa in ias4])\n if type4 in list(dic.keys()):\n dic[type4] += [dang]\n else:\n dic[type4] = [dang]\n elif self.key in ['ia','i']:\n type4 = ias4\n dic[type4] = dang\n else:\n raise Exception('#unknown key')\n return dic",
"def get_tones_dict(self):\n tone_list = []\n tone_length = 0\n # if tone is missclassified then tone length is assigned to next tone\n transition_length = 0\n IGNORE_THRESHOLD = 3\n\n prev = self.tone_list[0]\n for i, tone_class_name in enumerate(self.tone_list[1:]):\n tone_length += 1\n if prev != tone_class_name:\n mala_tone, vela_tone = self.get_abjad_tones(prev)\n\n if tone_length <= IGNORE_THRESHOLD:\n transition_length += tone_length\n else:\n tone_list.append(\n (mala_tone, vela_tone, tone_length + transition_length)\n )\n transition_length = 0\n # reset\n tone_length = 0\n prev = tone_class_name\n\n # append last\n if tone_length >= IGNORE_THRESHOLD:\n last_dict_name = self.tone_list[-1]\n mala_tone, vela_tone = self.get_abjad_tones(last_dict_name)\n tone_list.append((mala_tone, vela_tone, tone_length))\n\n # tones are here in abjad format (not in class format)\n return self.merge_same_tones(tone_list)",
"def get_resul(self):\n return {'W': self.W}",
"def get_resul(self):\n return {'W': self.W}",
"def get_resul(self):\n return {'W': self.W}",
"def get_resul(self):\n return {'W': self.W}",
"def tetrakis(self):\n return self.nlegomena(4)",
"def rate_limiters(self) -> ty.Dict[str, ty.Dict[str, int]]:",
"def __predecessors_list(self) -> Dict[int, Union[int, List[int]]]:\n predecessors = {}\n states = {}\n for row in range(0, len(self.__labyrinth)):\n for col in range(0, len(self.__labyrinth[0])):\n predecessors[self.__convert_position(row, col)] = -1\n states[self.__convert_position(row, col)] = False\n\n states[self.__convert_position()] = True\n\n row_queue = [self.__row_position]\n col_queue = [self.__col_position]\n\n while row_queue:\n current_row = row_queue.pop(0)\n current_col = col_queue.pop(0)\n\n for i in range(0, 4):\n next_row = current_row + Labyrinth.ROW_MOVE[i]\n next_col = current_col + Labyrinth.COL_MOVE[i]\n\n if self.__labyrinth[next_row][next_col] == \"#\":\n continue\n\n if states[next_row * len(self.__labyrinth[0]) + next_col]:\n continue\n\n predecessors[self.__convert_position(next_row, next_col)] = [current_row, current_col]\n\n if self.__labyrinth[next_row][next_col] == \"X\":\n break\n\n states[self.__convert_position(next_row, next_col)] = True\n\n row_queue.append(next_row)\n col_queue.append(next_col)\n\n return predecessors",
"def inverted_read_information(h, J, pivot):\r\n if pivot == 0:\r\n return [], [], []\r\n curr_idx = pivot-1\r\n mark = curr_idx\r\n cut_index = [mark]\r\n segment_length = []\r\n int_values = []\r\n nb_segment = 0\r\n binary_dict = {}\r\n while curr_idx >= 0:\r\n # init the mark_th segement in int_values\r\n int_value = h[mark]\r\n this = int_value\r\n # mv current index to next()\r\n curr_idx -= 1\r\n while curr_idx >= 0:\r\n exp = mark - curr_idx\r\n if exp not in binary_dict.keys():\r\n binary_dict[exp] = 2**exp\r\n this = [(i + binary_dict[exp]) if j == 1 else i for i, j in zip(this, h[curr_idx])]\r\n if len(set(this)) <= J and curr_idx >= 0:\r\n int_value = this\r\n curr_idx -= 1\r\n else:\r\n break\r\n\r\n int_values.append(int_value)\r\n mark = curr_idx\r\n cut_index.append(mark)\r\n nb_segment += 1\r\n\r\n transitions = []\r\n for i in range(len(int_values) - 1):\r\n transitions.append(dict(Counter([(i, j) for i, j in zip(int_values[i], int_values[i + 1])])))\r\n cut_index = cut_index[:-1]\r\n return [dict(Counter(i)) for i in int_values], transitions, cut_index[::-1]",
"def test_triangle_count_08(self):\n body = {\"direction\": \"IN\", \"degree\": 1}\n code, res = Algorithm().post_triangle_count(body, auth=auth)\n id = res[\"task_id\"]\n if id > 0:\n result = get_task_res(id, 120, auth=auth)\n print(result)\n assert result == {'edges_in': 13, 'vertices_in': 9, 'triangles': 0}\n else:\n assert 0",
"def _get_planar_tri_edges(npts, tris):\n\n # Find the nodes associated with the triangle\n node_to_tris = []\n for i in range(npts):\n node_to_tris.append([])\n\n for index, tri in enumerate(tris):\n node_to_tris[tri[0]].append(index)\n node_to_tris[tri[1]].append(index)\n node_to_tris[tri[2]].append(index)\n\n # Assign edge numbers for each edge\n edges = []\n edge_to_tris = []\n num_edges = 0\n\n tri_to_edges = []\n for i in range(len(tris)):\n tri_to_edges.append([-1, -1, -1])\n\n for tri_index, tri in enumerate(tris):\n for e1_index, e1 in enumerate(_get_tri_edges(tri)):\n if tri_to_edges[tri_index][e1_index] < 0:\n match = False\n for adj_index in node_to_tris[e1[0]]:\n if adj_index != tri_index:\n for e2_index, e2 in enumerate(_get_tri_edges(tris[adj_index])):\n if ((e1[0] == e2[0] and e1[1] == e2[1]) or\n (e1[1] == e2[0] and e1[0] == e2[1])):\n match = True\n tri_to_edges[tri_index][e1_index] = num_edges\n tri_to_edges[adj_index][e2_index] = num_edges\n edges.append((e1[0], e1[1]))\n edge_to_tris.append((tri_index, adj_index))\n num_edges += 1\n break\n if match:\n break\n\n if not match:\n edges.append((e1[0], e1[1]))\n edge_to_tris.append((tri_index, -1))\n tri_to_edges[tri_index][e1_index] = num_edges\n num_edges += 1\n\n return edges, tri_to_edges, edge_to_tris",
"def helix_triplet_stats (self):\n\n for Value in ['Phi']:\n\n HistogramPlot(np.array(self. values_list(Value, flat=True)), 'myproject/myapp/static/myapp/static/Stats/HelixTriplet/'+Value )\n #zrobic jakies dict coby robilo ranges, uzaleznialo np od zakresu albo od czegos\n\n return",
"def get_rules_dicts(self):\n\n all_rules = self.conn.get_all_rules()\n\n dic_rules_L = []\n dic_rules_R = []\n\n for line in all_rules:\n\n lemas = line[8].split(\"<sep>\")\n tags = line[11].split(\"<sep>\")\n\n if line[2] == 'R':\n\n count_pos = 0\n for index, lema in enumerate(lemas):\n\n key_dic = tags[index] + \"<sep>\" + lema\n\n try:\n dic = dic_rules_R[count_pos]\n except IndexError:\n dic_rules_R.insert(count_pos,{})\n dic = dic_rules_R[count_pos]\n\n\n if key_dic in dic.keys():\n dic[key_dic] += 1\n else:\n dic[key_dic] = 1\n\n dic_rules_R[count_pos] = dic\n\n count_pos +=1\n\n else:\n lemas = reversed(lemas)\n tags = list(reversed(tags))\n\n count_pos = 0\n for index, lema in enumerate(lemas):\n\n key_dic = tags[index] + \"<sep>\" + lema\n\n try:\n dic = dic_rules_L[count_pos]\n except IndexError:\n dic_rules_L.insert(count_pos,{})\n dic = dic_rules_L[count_pos]\n\n\n if key_dic in dic.keys():\n dic[key_dic] += 1\n else:\n dic[key_dic] = 1\n\n dic_rules_L[count_pos] = dic\n\n count_pos +=1\n\n return dic_rules_R, dic_rules_L",
"def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n # type: () -> Dict[Text: Union[Dict, List[Dict]]]\n return {\n \"numero_prendas\": [\n self.from_entity(entity=\"number\"),\n\n ]\n }",
"def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n # type: () -> Dict[Text: Union[Dict, List[Dict]]]\n return {\n \"numero_prendas\": [\n self.from_entity(entity=\"number\"),\n\n ]\n }",
"def information(counts: list) -> list:\n heights = []\n # magic\n e = (1 / math.log(2)) * ((4 - 1) / (2 * sum([counts[1][base] for base in \"ACGT\"])))\n for column_count in counts:\n relative_frqs = {base: column_count[base] / sum(column_count.values()) for base in \"ACGT\"}\n H = -1 * sum([relative_frqs[base] * math.log2(relative_frqs[base]) for base in \"ACGT\"])\n R = math.log2(4) - (H + e)\n heights.append({base: relative_frqs[base] * R for base in \"ACGT\"})\n # end magic\n return heights",
"def _generate_table(self):\n for i in xrange(32):\n dest = [0]\n gw = [0]\n self._table.append(\n {'destination': dest, 'gateway': gw}\n )",
"def buildBoard(self, n):\n\n boardDict = []\n diagCount = 0\n\n for i in range(n):\n self.rows[i] = [True, \"\", 0] #homogenous, X/O, count of X's/O's\n self.cols[i] = [True, \"\", 0]\n for j in range(n):\n\n# Is there a faster way to make this array than nested for loops?\n boardDict.append((i,j))\n return boardDict",
"def generate_grid_dict(height, width):\n board = {}\n for i in range(height):\n for j in range(width):\n position = (i, j)\n board[position] = 0\n return board",
"def aPosteriori(self) -> dict:\n\n simbIn = self.simbIn\n simbOut = self.simbOut\n probIn = self.probIn\n probOut = self.probOut\n mat = self.mat\n\n return {\n i: {\n j: mat[i][j] * probIn[i] / probOut[j] for j in simbOut\n } for i in simbIn\n }"
] | [
"0.67723066",
"0.6626647",
"0.5800086",
"0.5686751",
"0.5580213",
"0.55674565",
"0.5506504",
"0.54808724",
"0.5421397",
"0.54160047",
"0.5405297",
"0.5372639",
"0.5343492",
"0.5306792",
"0.530101",
"0.5289107",
"0.5287096",
"0.5276585",
"0.52750313",
"0.52660143",
"0.52657723",
"0.52529484",
"0.5239551",
"0.5237266",
"0.52153945",
"0.51736224",
"0.5171337",
"0.51637864",
"0.5154374",
"0.5137404",
"0.51357305",
"0.5119255",
"0.51043755",
"0.5064137",
"0.50610673",
"0.50563264",
"0.5051029",
"0.50488645",
"0.5028159",
"0.5027574",
"0.5015453",
"0.5009708",
"0.500852",
"0.5007944",
"0.5007001",
"0.50052446",
"0.5000808",
"0.499613",
"0.49958217",
"0.49919945",
"0.49891853",
"0.49849993",
"0.49843097",
"0.49791822",
"0.49756646",
"0.49740517",
"0.49688193",
"0.4958381",
"0.49542007",
"0.4941401",
"0.49392813",
"0.49361068",
"0.49340367",
"0.49259666",
"0.49251574",
"0.49250072",
"0.4918857",
"0.49116588",
"0.49094528",
"0.49085522",
"0.49047822",
"0.4903735",
"0.4891139",
"0.4889586",
"0.48886967",
"0.4882992",
"0.48766947",
"0.48743457",
"0.4867361",
"0.48665074",
"0.48571676",
"0.4856388",
"0.4856388",
"0.4856388",
"0.4856388",
"0.4854793",
"0.48525885",
"0.48462304",
"0.48434556",
"0.48405612",
"0.48313525",
"0.4828773",
"0.48248777",
"0.481329",
"0.481329",
"0.4812913",
"0.48096532",
"0.48076063",
"0.47961736",
"0.47958186"
] | 0.6907697 | 0 |
Computes the (Xc, Yc) coordinates of actuator centres inside a circle of rho_aper, assuming there are N_actuators along the [-1, 1] line | def actuator_centres(N_actuators, rho_aper=RHO_APER, rho_obsc=RHO_OBSC):
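    # uniform 1D grid of candidate actuator positions spanning the pupil diameter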
x0 = np.linspace(-1., 1., N_actuators, endpoint=True)
delta = x0[1] - x0[0]
    N_in_D = 2*rho_aper/delta
print('%.2f actuators in D' %N_in_D)
max_freq = N_in_D / 2 # Max spatial frequency we can sense
xx, yy = np.meshgrid(x0, x0)
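    # flatten the square grid into candidate (x, y) pairs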
x_f = xx.flatten()
y_f = yy.flatten()
act = []
for x_c, y_c in zip(x_f, y_f):
r = np.sqrt(x_c ** 2 + y_c ** 2)
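        # keep only actuators inside the annular pupil: ~3% margin at the outer edge, ~5% at the central obscuration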
if r < 0.97 * rho_aper and r > 1.05 * rho_obsc:
act.append([x_c, y_c])
total_act = len(act)
print('Total Actuators: ', total_act)
return [act, delta], max_freq | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def center_of_charge(self):\n ret = [0.0, 0.0, 0.0]\n total_c = 0.0\n\n for at in range(self.natom()):\n c = self.charge(at)\n ret = add(ret, scale(self.xyz(at), c))\n total_c += c\n\n ret = scale(ret, 1.0 / total_c)\n return ret",
"def gen_centers(self):\n\n \"\"\"x_track = self.cs.discrete_rollout()\n t = np.arange(len(x_track))*self.dt\n # choose the points in time we'd like centers to be at\n c_des = np.linspace(0, self.cs.run_time, self.n_bfs)\n self.c = np.zeros(len(c_des))\n for ii, point in enumerate(c_des):\n diff = abs(t - point)\n self.c[ii] = x_track[np.where(diff == min(diff))[0][0]]\"\"\"\n\n # desired activations throughout time\n des_c = jnp.linspace(0, self.cs.run_time, self.n_bfs)\n\n self.c = np.ones(len(des_c))\n for n in range(len(des_c)):\n # finding x for desired times t\n self.c[n] = jnp.exp(-self.cs.ax * des_c[n])\n self.c = jnp.array(self.c)",
"def compute_center(self, mole_object):\r\n if mole_object.plugin_type == \"PyMOL\":\r\n sel = PymolPlugin.PymolPlugin().get_model('all')\r\n cnt = len(sel.atom)\r\n\r\n else:\r\n sel = ChimeraPlugin.ChimeraPlugin().select()\r\n cnt = len(ChimeraPlugin.ChimeraPlugin().current_atoms())\r\n\r\n cent_x = 0\r\n cent_y = 0\r\n cent_z = 0\r\n\r\n if cnt == 0:\r\n return 0, 0, 0\r\n\r\n if mole_object.plugin_type == \"PyMOL\":\r\n\r\n for a in sel.atom:\r\n cent_x += a.coord[0]\r\n cent_y += a.coord[1]\r\n cent_z += a.coord[2]\r\n\r\n else:\r\n\r\n for a in ChimeraPlugin.ChimeraPlugin().current_atoms():\r\n cent_x += a.coord()[0]\r\n cent_y += a.coord()[1]\r\n cent_z += a.coord()[2]\r\n\r\n cent_x /= cnt\r\n cent_y /= cnt\r\n cent_z /= cnt\r\n\r\n self.point_x.component('entryfield').setentry(cent_x)\r\n self.point_y.component('entryfield').setentry(cent_y)\r\n self.point_z.component('entryfield').setentry(cent_z)\r\n\r\n self.show_crisscross(mole_object)",
"def find_center(r):\n cx=r.corner.x+(r.width/2)\n cy=r.corner.y+(r.height/2)\n return cx,cy",
"def circle_center(self):\n return self.container.width / 2, self.container.height / 2",
"def find_cea_coord(header,phi_c,lambda_c,nx,ny,dx,dy):\n nx = int(nx)\n ny = int(ny)\n\n # Array of CEA coords\n x = []\n y = []\n\n for j in range(ny):\n col = []\n row = []\n for i in range(nx):\n col.append(np.radians((i-(nx-1)/2)*dx))\n row.append(np.radians((j-(ny-1)/2)*dy))\n x.append(col)\n y.append(row)\n\n x = np.array(x)\n y = np.array(y)\n\n # Relevant header values\n rSun = header['rsun_obs']/header['cdelt1'] #solar radius in pixels\n disk_latc = np.radians(header['CRLT_OBS'])\n disk_lonc = np.radians(header['CRLN_OBS'])\n disk_xc = header['CRPIX1'] - 1 #disk center wrt lower left of patch\n disk_yc = header['CRPIX2'] - 1\n pa = np.radians(header['CROTA2']*-1)\n\n latc = np.radians(lambda_c)\n lonc = np.radians(phi_c) - disk_lonc\n\n # Convert coordinates\n lat = []\n lon = []\n xi = []\n eta = []\n\n for j in range(ny):\n lat_col = []\n lon_col = []\n xi_col = []\n eta_col = []\n for i in range(nx):\n lat0,lon0 = plane2sphere(x[j,i],y[j,i],latc,lonc)\n lat_col.append(lat0)\n lon_col.append(lon0)\n\n xi0,eta0 = sphere2img(lat0,lon0,disk_latc,0.0,disk_xc,disk_yc,rSun,pa)\n xi_col.append(xi0)\n eta_col.append(eta0)\n lat.append(lat_col)\n lon.append(lon_col)\n xi.append(xi_col)\n eta.append(eta_col)\n\n lat = np.array(lat)\n lon = np.array(lon)\n xi = np.array(xi)\n eta = np.array(eta)\n\n return xi,eta,lat,lon",
"def points_on_circumference(center=(0, 0), r=50, n=100):\n\treturn [\n (\n center[0]+(cos(2 * pi / n * x) * r), \n center[1] + (sin(2 * pi / n * x) * r) \n\n ) for x in range(0, n + 1)]",
"def getCartesianPoints2(r, theta, center):\n x = r * np.cos(theta) + center[0]\n y = r * np.sin(theta) + center[1]\n\n return x, y",
"def centers(self):\n return self.xc, self.yc",
"def get_circle_coords(center, r):\n circle = [[r, 180* phi/3.14159265] for phi in range(0, 180, 5)]\n circle = [pol2cart(p[0], p[1]) + (center[0], center[1]) for p in circle]\n return circle",
"def centroid(cnt):\n\tM = cv2.moments(cnt)\n\tcx = int(M['m10']/M['m00'])\n\tcy = int(M['m01']/M['m00'])\n\treturn (cx, cy)",
"def generate_circle(R,center,N=100,t0=0.0,t1=2.0*np.pi):\r\n theta = np.linspace(t0,t0+t1,N)\r\n y = R*np.sin(theta) + center[1]\r\n x = R*np.cos(theta) + center[0]\r\n return x,y",
"def get_arc_center(self):\n # First two anchors and handles\n a1, h1, h2, a2 = self.points[:4]\n # Tangent vectors\n t1 = h1 - a1\n t2 = h2 - a2\n # Normals\n n1 = rotate_vector(t1, TAU / 4)\n n2 = rotate_vector(t2, TAU / 4)\n try:\n return line_intersection(\n line1=(a1, a1 + n1),\n line2=(a2, a2 + n2),\n )\n except Exception:\n warnings.warn(\"Can't find Arc center, using ORIGIN instead\")\n return np.array(ORIGIN)",
"def image_proc(self):\r\n res_erode, res_in_rect = self._get_res_image()\r\n\r\n Moment_rect = cv2.moments(res_in_rect)\r\n if (Moment_rect[\"m00\"] != 0):\r\n self.rect_x = int(Moment_rect[\"m10\"] / Moment_rect[\"m00\"])\r\n self.rect_y = int(Moment_rect[\"m01\"] / Moment_rect[\"m00\"])\r\n\r\n # afin de trouver les centres de la rectangulaire.\r\n cnts, hierarchy = cv2.findContours(res_erode,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\r\n for i in cnts:\r\n #Moment = cv2.moments(cnts)\r\n moment = cv2.moments(res_erode)\r\n self.cx = int(moment[\"m10\"] / moment[\"m00\"])\r\n self.cy = int(moment[\"m01\"] / moment[\"m00\"])\r\n cv2.circle(output_image,(self.cx,self.cy),7,(255,255,255),-1)\r\n cv2.putText(output_image, \"center\", (self.cx, self.cy), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)\r\n #print(cX,cY)\r",
"def get_circle(a, b, c):\n vec = [a[0]**2 + a[1]**2, b[0]**2 + b[1]**2, c[0]**2 + c[1]**2]\n x_mat = [vec, [a[1], b[1], c[1]], [1]*3]\n y_mat = [vec, [a[0], b[0], c[0]], [1]*3]\n d_mat = [[a[0], b[0], c[0]], [a[1], b[1], c[1]], [1] * 3]\n d = 2 * det(d_mat)\n x = 1 / d * det(x_mat)\n y = -1 / d * det(y_mat)\n center = [x, y]\n #r = norm(center - a)\n r = norm([center[0]-a[0], center[1]-a[1]])\n return center, r",
"def calculate_center(self):\n return [(self.startX + self.endX) / 2., (self.startY + self.endY) / 2.]",
"def center(self):\n xc = (self.x.max() + self.x.min())/2.\n yc = (self.y.max() + self.y.min())/2.\n return (xc, yc)",
"def center(self):\n return np.array([0,0,1/self.C+self.pos()])",
"def center(self):\n return [self.position[i]+self.radius for i in range(2)]",
"def plotCentroid(img, cnt, radius = 3, color=(255, 255, 0)):\n\tcx, cy = centroid(cnt)\n\tdrawCircle(img, (cx, cy), radius = radius, color = color)\n\treturn (cx, cy)",
"def attacker_position(inputs):\n rho, theta, _, _, _ = inputs\n x = rho * np.cos(theta)\n y = rho * np.sin(theta)\n return x, y",
"def center_of_coor(coordinates):\n return (np.sum(coordinates, axis=0) / coordinates.shape[0])",
"def calc_R(center):\r\n xc = center[0]\r\n yc = center[1]\r\n return np.sqrt((x-xc)**2 + (y-yc)**2)",
"def get_circle_coords(self, radius, divider, count,center_x, center_y):\n\n angle_deg = (360/divider)*count\n angle = radians(angle_deg-(90 + (360/divider)))\n x = radius*cos(angle) + center_x;\n y = radius*sin(angle) + center_y;\n return (int(x), int(y))",
"def getAfinityCenter(width, height, point, center, radius=7, img_affinity=None):\n tensor = torch.zeros(2, height, width).float()\n\n # Create the canvas for the affinity output\n imgAffinity = Image.new(\"RGB\", (width, height), \"black\")\n totensor = transforms.Compose([transforms.ToTensor()])\n\n draw = ImageDraw.Draw(imgAffinity)\n r1 = radius\n p = point\n draw.ellipse((p[0] - r1, p[1] - r1, p[0] + r1, p[1] + r1), (255, 255, 255))\n\n del draw\n\n # Compute the array to add the affinity\n array = (np.array(imgAffinity) / 255)[:, :, 0]\n\n angle_vector = np.array(center) - np.array(point)\n angle_vector = normalize(angle_vector)\n affinity = np.concatenate([[array * angle_vector[0]], [array * angle_vector[1]]])\n\n # print (tensor)\n if not img_affinity is None:\n # Find the angle vector\n # print (angle_vector)\n if length(angle_vector) > 0:\n angle = py_ang(angle_vector)\n else:\n angle = 0\n # print(angle)\n c = np.array(colorsys.hsv_to_rgb(angle / 360, 1, 1)) * 255\n draw = ImageDraw.Draw(img_affinity)\n draw.ellipse((p[0] - r1, p[1] - r1, p[0] + r1, p[1] + r1), fill=(int(c[0]), int(c[1]), int(c[2])))\n del draw\n re = torch.from_numpy(affinity).float() + tensor\n return re, img_affinity",
"def aerodynamic_center(self, chord_fraction: float = 0.25) -> np.ndarray:\n sectional_areas = self.area(_sectional=True)\n sectional_ACs = []\n\n for inner_xsec, outer_xsec in zip(self.xsecs[:-1], self.xsecs[1:]):\n\n section_taper_ratio = outer_xsec.chord / inner_xsec.chord\n section_MAC_length = (2 / 3) * inner_xsec.chord * (\n (1 + section_taper_ratio + section_taper_ratio ** 2) /\n (1 + section_taper_ratio)\n )\n section_MAC_le = (\n inner_xsec.xyz_le +\n (outer_xsec.xyz_le - inner_xsec.xyz_le) *\n (1 + 2 * section_taper_ratio) /\n (3 + 3 * section_taper_ratio)\n )\n section_AC = section_MAC_le + np.array([ # TODO rotate this vector by the local twist angle\n chord_fraction * section_MAC_length,\n 0,\n 0\n ])\n\n sectional_ACs.append(section_AC)\n\n sectional_AC_area_products = [\n AC * area\n for AC, area in zip(\n sectional_ACs,\n sectional_areas,\n )\n ]\n\n aerodynamic_center = sum(sectional_AC_area_products) / sum(sectional_areas)\n\n aerodynamic_center += self.xyz_le\n\n if self.symmetric:\n aerodynamic_center[1] = 0\n\n return aerodynamic_center",
"def get_center_ball(self, output):\n output = output.reshape((360, 640))\n\n # cv2 image must be numpy.uint8, convert numpy.int64 to numpy.uint8\n output = output.astype(np.uint8)\n\n # reshape the image size as original input image\n heatmap = cv2.resize(output, (640, 360))\n\n # heatmap is converted into a binary image by threshold method.\n ret, heatmap = cv2.threshold(heatmap, 127, 255, cv2.THRESH_BINARY)\n\n # find the circle in image with 2<=radius<=7\n circles = cv2.HoughCircles(heatmap, cv2.HOUGH_GRADIENT, dp=1, minDist=1, param1=50, param2=2, minRadius=2,\n maxRadius=7)\n # check if there have any tennis be detected\n if circles is not None:\n # if only one tennis be detected\n if len(circles) == 1:\n x = int(circles[0][0][0])\n y = int(circles[0][0][1])\n\n return x, y\n return None, None",
"def circle(draw, centrex, centrey, radius, color=\"#AAAAAAFF\") -> None:\n # convert cartesian centre to pixel centre\n cx, cy = pixelcoord(centrex, centrey)\n # top left and bottom right coordinates\n rect = [(cx-radius, cy-radius), (cx+radius, cy+radius)]\n # draw\n draw.arc(rect, 0, 360, color)",
"def find_center(self) -> tuple:\r\n \r\n # Add up all the x values of pixels in the plant\r\n # Then divide by total pixels in the plant\r\n avg_x = sum([i[0] for i in self.cluster]) / len(self.cluster)\r\n\r\n # Add up all the y values of pixels in the plant\r\n # Then divide by total pixels in the plant\r\n avg_y = sum([i[1] for i in self.cluster]) / len(self.cluster)\r\n\r\n self.center = (int(round(avg_x)), int(round(avg_y)))\r\n \r\n # return the results in a tuple of integers\r\n return self.center",
"def _compute_ball_visualization(self, center, radius, angle):\r\n x_coord = [center[0]]\r\n y_coord = [center[1]]\r\n\r\n angles = np.linspace(angle, angle + 2 * np.pi, 100)\r\n\r\n x_coord.extend([center[0] - radius * np.sin(a) for a in angles])\r\n y_coord.extend([center[1] + radius * np.cos(a) for a in angles])\r\n\r\n return [x_coord, y_coord]",
"def getCartesianPoints(rTheta, center):\n if rTheta.ndim == 2:\n x = rTheta[:, 0] * np.cos(rTheta[:, 1]) + center[0]\n y = rTheta[:, 0] * np.sin(rTheta[:, 1]) + center[1]\n else:\n x = rTheta[0] * np.cos(rTheta[1]) + center[0]\n y = rTheta[0] * np.sin(rTheta[1]) + center[1]\n\n return np.array([x, y]).T",
"def pos_on_semicircle(x, r, cxy):\n pos = np.sqrt(r ** 2 - (x - cxy[0]) ** 2) + cxy[1]\n\n return pos",
"def center_of_mass(im_binary, x_offset=0, y_offset=0):\n n = np.sum(im_binary)\n\n x = np.arange(im_binary.shape[1]) + x_offset\n y = np.arange(im_binary.shape[0]) + y_offset\n xv, yv = np.meshgrid(x, y)\n cx = np.sum(xv[im_binary]) / n\n cy = np.sum(yv[im_binary]) / n\n\n return cx, cy",
"def circumcenter(C):\n ri, rj, rk = C.transpose(1,2,0)\n ax, ay = ri\n bx, by = rj\n cx, cy = rk\n d = 2 * (ax * (by - cy) + bx * (cy - ay) + cx * (ay - by))\n ux = ((ax * ax + ay * ay) * (by - cy) + (bx * bx + by * by) * (cy - ay) + (cx * cx + cy * cy) * (\n ay - by)) / d\n uy = ((ax * ax + ay * ay) * (cx - bx) + (bx * bx + by * by) * (ax - cx) + (cx * cx + cy * cy) * (\n bx - ax)) / d\n vs = np.empty((ax.size,2),dtype=np.float64)\n vs[:,0],vs[:,1] = ux,uy\n return vs",
"def find_center(self):\n x = np.int(np.rint((len(self.grid[0][0]))/2))\n center = np.array([x, x, x])\n self.grid[center[0]][center[1]][center[2]] = 1\n return self.grid, center",
"def get_centers(file_path = \"./data/input.jpeg\", n_rows=10, n_cols=10, verbose=False, image_thresh=20, hough_thresh=20, minDist=20, maxRadius=20):\n print(\"looking for circles in the image\")\n cimg = cv2.imread(file_path, 1)\n gimg = cv2.cvtColor(cimg, cv2.COLOR_BGR2GRAY)\n gimg = cv2.medianBlur(gimg, 5)\n # threshold the image for better circle finding\n _, gimg = cv2.threshold(gimg, image_thresh, 255, cv2.THRESH_BINARY)\n\n\n # find circles on the image\n circles = cv2.HoughCircles(gimg, cv2.HOUGH_GRADIENT, dp=1, minDist=minDist, param1=1, param2=hough_thresh, minRadius=5,\n maxRadius=maxRadius)\n\n # re-sequence the circles found\n circles = circles[0, :, :].reshape((-1, 3))\n x_min = circles.min(axis=0)[0]\n x_max = circles.max(axis=0)[0]\n avg_spacing = (x_max - x_min) / (n_cols - 1)\n x_segment = [x_min - avg_spacing / 2 + avg_spacing * i for i in range(11)]\n def get_col_id(point):\n # return which col this point sits\n x = point[0]\n for i in range(n_cols):\n if x_segment[i] < x < x_segment[i + 1]:\n return i\n cols = [[] for i in range(n_cols)]\n for circle in circles:\n id = get_col_id(circle)\n cols[get_col_id(circle)].append(circle)\n for i, col in enumerate(cols):\n cols[i] = list(sorted(col, key=lambda p:p[1]))\n grid = np.zeros((n_rows, n_cols, 3))\n for c in range(n_cols):\n for r in range(n_rows):\n grid[r, c, :] = cols[c][r]\n\n # plot theses circles to make sure the sequence is correct\n gaussian_img = cv2.GaussianBlur(cv2.cvtColor(cimg, cv2.COLOR_BGR2GRAY), (5, 5), 0)\n for c in range(10):\n for r in range(10):\n pos = grid[r, c, :2].astype(np.int)\n radius = grid[r, c, 2].astype(np.int)\n # draw the outer circle\n cv2.circle(cimg, (pos[0], pos[1]), radius, (0, 255, 0), 2)\n # draw the center of the circle\n cv2.circle(cimg, (pos[0], pos[1]), 2, (0, 0, 255), 3)\n # find the avg intensity at each center\n grid[r, c, 2] = gaussian_img[pos[1], pos[0]]\n if verbose:\n cv2.imshow('circles found in the image', cimg)\n cv2.waitKey(10)\n\n if verbose:\n cv2.imshow('circles found in the image', cimg)\n cv2.waitKey(0)\n cv2.destroyAllWindows()\n\n return grid",
"def _move_receptor_to_grid_center(self):\n lower_receptor_corner = np.array([self._crd[:,i].min() for i in range(3)], dtype=float)\n upper_receptor_corner = np.array([self._crd[:,i].max() for i in range(3)], dtype=float)\n \n receptor_box_center = (upper_receptor_corner + lower_receptor_corner) / 2.\n grid_center = (self._origin_crd + self._uper_most_corner_crd) / 2.\n displacement = grid_center - receptor_box_center\n\n print(\"Receptor is translated by \", displacement)\n\n for atom_ind in range(len(self._crd)):\n self._crd[atom_ind] += displacement\n return None",
"def project_p2c_points(R, C, H): #---- project to cylindrical\r\n Rc= H[0]/2; Cc= H[1]/2; # center coordinate\r\n phi = H[3]; S= H[4]; # rotation angle and sizing\r\n Tv = H[5]; Tu= H[6]; # displacement\r\n COSF= np.cos(phi); SINF= np.sin(phi); # \r\n U = Tu + S*( COSF*(C- Cc)- SINF*(R- Rc) ); \r\n V = Tv + S*( SINF*(C- Cc)+ COSF*(R- Rc) );\r\n return V, U",
"def _get_pointFromEllipseAngle(self, centerx, centery, radiush, radiusv, ang):\r\n th = np.radians(ang)\r\n ratio = (radiush/2.0)/float(radiusv/2.0)\r\n x = centerx + radiush/2.0 * np.cos(th)\r\n y = centery + radiusv/2.0 * np.sin(th)\r\n return int(x), int(y)",
"def center_of_contour(contorno):\n M = cv2.moments(contorno)\n # Usando a expressão do centróide definida em: https://en.wikipedia.org/wiki/Image_moment\n if M[\"m00\"]!=0:\n cX = int(M[\"m10\"] / M[\"m00\"])\n cY = int(M[\"m01\"] / M[\"m00\"])\n return (int(cX), int(cY))\n else:\n return (200,150)",
"def _get_midplane_polar_coords(self, x0, y0, inc, PA):\n x_mid, y_mid = self._get_midplane_cart_coords(x0, y0, inc, PA)\n return np.hypot(y_mid, x_mid), np.arctan2(y_mid, x_mid)",
"def getcenter(self):\n return self.centro.cartesianas()",
"def calculate_cohesion(boids: List[b.Boid]) -> Tuple[float, float]:\n center_x = 0\n center_y = 0\n for boid in boids:\n center_x += boid.x\n center_y += boid.y\n return center_x / len(boids), center_y / len(boids)",
"def polar_coord(point, center):\n x = point[0] - center[0]\n y = point[1] - center[1]\n rho = np.sqrt(x ** 2 + y ** 2)\n phi = np.arctan2(y, x)\n return np.array([phi, rho])",
"def getAfinityCenter(width, height, point, center, radius=7, img_affinity=None):\n tensor = torch.zeros(2,height,width).float()\n\n # Create the canvas for the afinity output\n imgAffinity = Image.new(\"RGB\", (width,height), \"black\")\n totensor = transforms.Compose([transforms.ToTensor()])\n \n draw = ImageDraw.Draw(imgAffinity) \n r1 = radius\n p = point\n draw.ellipse((p[0]-r1,p[1]-r1,p[0]+r1,p[1]+r1),(255,255,255))\n\n del draw\n\n # Compute the array to add the afinity\n array = (np.array(imgAffinity)/255)[:,:,0]\n\n angle_vector = np.array(center) - np.array(point)\n angle_vector = normalize(angle_vector)\n affinity = np.concatenate([[array*angle_vector[0]],[array*angle_vector[1]]])\n\n # print (tensor)\n if not img_affinity is None:\n # Find the angle vector\n # print (angle_vector)\n if length(angle_vector) >0:\n angle=py_ang(angle_vector)\n else:\n angle = 0\n # print(angle)\n c = np.array(colorsys.hsv_to_rgb(angle/360,1,1)) * 255\n draw = ImageDraw.Draw(img_affinity) \n draw.ellipse((p[0]-r1,p[1]-r1,p[0]+r1,p[1]+r1),fill=(int(c[0]),int(c[1]),int(c[2])))\n del draw\n re = torch.from_numpy(affinity).float() + tensor\n return re, img_affinity",
"def center(self):\n return (self.centerx, self.centery)",
"def circle_center(top_aerofoil_points, bottom_aerofoil_points):\n q = np.array(top_aerofoil_points[0].coordinates) - np.array(top_aerofoil_points[1].coordinates)\n r = np.array(bottom_aerofoil_points[-1].coordinates) - np.array(bottom_aerofoil_points[-2].coordinates)\n c = np.cross(q, [0, 0, -1]) / np.linalg.norm(q)\n d = np.cross(r, [0, 0, 1]) / np.linalg.norm(r)\n radius = (q[1] - r[1]) / (d[1] - c[1])\n s = q + radius * c\n return Point(tuple(-s))",
"def discretized_circle(radius, n_pts):\n x1 = np.zeros(n_pts)\n y1 = np.zeros(n_pts)\n for i in range(0, n_pts):\n x1[i] = np.cos(2 * np.pi / n_pts * i) * radius\n y1[i] = np.sin(2 * np.pi / n_pts * i) * radius\n\n x2 = np.roll(x1, -1)\n y2 = np.roll(y1, -1)\n return x1, y1, x2, y2",
"def i_coords(self):\n ref_x = np.arange(-self.ref_w / 2, self.ref_w / 2 + 0.002, 0.002)\n\n if self.ref_shape == 'c': # Curved reflector\n dist_coords1 = [(ref_x[i], pos_on_semicircle(ref_x[i], self.R, self.c_xy)) for i in range(self.I)]\n dist_coords2 = [(ref_x[i + 1], pos_on_semicircle(ref_x[i + 1], self.R, self.c_xy)) for i in range(self.I)]\n a_i = [distance(dist_coords1[i], dist_coords2[i]) for i in range(self.I)]\n\n cx_i = [ref_x[i] + (ref_x[i + 1] - ref_x[i]) / 2 for i in range(self.I)]\n cy_i = [pos_on_semicircle(x, self.R, self.c_xy) for x in cx_i]\n i_coords = list(zip(cx_i, cy_i))\n else: # Flat reflector\n a_i = [(ref_x[i + 1] - ref_x[i]) / 2 for i in range(self.I)]\n cx_i = [ref_x[i] + (ref_x[i + 1] - ref_x[i]) / 2 for i in range(self.I)]\n i_coords = [(x, self.h) for x in cx_i]\n d = {'ref_x': ref_x, 'A_i': a_i, 'I_coords': i_coords, 'cx_i': cx_i}\n\n return d",
"def cinters_circle(self, c):\r\n if self.__segments == None:\r\n self.__load_segments()\r\n \r\n result = []\r\n for segment in self.__segments:\r\n points = c.inters_segment(segment)\r\n for p in points:\r\n result.append(p) \r\n \r\n return result",
"def draw_phys_dist_container(self, center_x, center_y, radius):\n\n dis_con_point_1 = int(center_x - radius * 0.95 * sin(radians(15))), int(center_y + radius * 0.95 * cos(radians(45)))\n dis_con_point_2 = int(center_x + radius * 0.95 * sin(radians(15))), int(center_y + radius * 0.95 * cos(radians(45)))\n dis_con_point_3 = int(center_x + radius * 0.95 * sin(radians(20))), int(center_y + radius * 0.95 * cos(radians(15)))\n dis_con_point_4 = int(center_x - radius * 0.95 * sin(radians(20))), int(center_y + radius * 0.95 * cos(radians(15)))\n\n cv2.line(self.image, dis_con_point_1, dis_con_point_2, (0, 0, 255), 1, cv2.LINE_AA)\n cv2.line(self.image, dis_con_point_2, dis_con_point_3, (0, 0, 255), 1, cv2.LINE_AA)\n cv2.line(self.image, dis_con_point_1, dis_con_point_4, (0, 0, 255), 1, cv2.LINE_AA)",
"def center_coords(self):\n coords = set()\n for x in range(self.radius, self.container.width - self.radius):\n for y in range(self.radius, self.container.height - self.radius):\n coords.add((x, y))\n\n return coords",
"def generate_circle_by_angles(t, C, r, theta, phi):\n n = np.array([np.cos(phi)*np.sin(theta), np.sin(phi)*np.sin(theta), np.cos(theta)])\n u = np.array([-np.sin(phi), np.cos(phi), 0])\n\n P_circle = r*np.cos(t)[:, np.newaxis]*u + r*np.sin(t)[:, np.newaxis]*np.cross(n, u) + C\n\n return P_circle",
"def circ_dist(azimuth1, azimuth2, radius=1.0):\n return np.arccos(np.cos(azimuth1 - azimuth2))",
"def circle():\n xmin=0\n xmax=6.5\n ymin=0.\n ymax=6.5\n\n x = arange(xmin, xmax, 0.005)\n y = x*1.\n [xx, yy] = meshgrid(x, y)\n\n zz=sqrt((xx-3.2475)**2.+(yy-3.2475)**2.)\n zz2=zz*1.\n zz2[(zz <= 3.25)]=1.\n zz2[(zz <= 3.25*0.2)]=0.\n zz2[(zz > 3.25)]=0.\n zz3=zeros(numpy.array(numpy.shape(zz2))/10)\n for i in arange(len(xx)/10):\n for j in arange(len(yy)/10):\n zz3[i,j]=numpy.sum(zz2[(i*10):(i*10+10),(j*10):(j*10+10)])/100.\n\n return zz3",
"def calculate_fiber_center(self, x, y, crop_size=15):\n self.logger.info(f'Calculating fiber center using ({x}, {y})')\n image = np.copy(self.camera_fiber.temp_image)\n self.fiber_center_position = self.calculate_gaussian_centroid(image, x, y, crop_size)\n return [x,y] #m",
"def get_uvcircle(Grid):\n \n# center of circulation\n loc=-67.5;lac=41.5; \n dx=(Grid['lonc']-loc)*Grid['coslatc']\n dy=(Grid['latc']-lac)\n di=np.sqrt(dx*dx+dy*dy)\n an=np.angle(dx+1j*dy)\n# velocity is linearly increasing with distance \n# 0.1 m/s at 1 deg distance away from center \n# cyclonic gyre \n u=-0.1*di*np.sin(an)\n v= 0.1*di*np.cos(an)\n# adjust the velocity so that the rotation will be perfect \n# on lon-lat plane\n u=u*Grid['coslatc']/np.cos(lac*np.pi/180) \n \n return u,v",
"def azizen(self):\n # x0,y0 array pixel coordinates relative to cx,cy\n# ndy0,ndx0=img.shape\n ndy0=self.ndy0\n ndx0=self.ndx0\n x0,y0=np.meshgrid(np.linspace(0,ndx0-1,ndx0)-self.cx,np.linspace(0,ndy0-1,ndy0)-self.cy)\n r0=np.sqrt(x0**2+y0**2)/self.pr0 # fractional radial distance from 0,0\n# self.roi=np.s_[ystart:ystart+self.ny0,xstart:xstart+self.nx0]\n # why not model the zenith angle dependence with polynomial directly\n # rather than linear interpolation between roots.\n roots=np.zeros(51)\n rr=np.arange(51)/100.0\n for i,ref in enumerate(rr):\n roots[i]=np.real(np.roots([self.c3,0,self.c2,0,self.c1,-ref])[-1])\n theta0 = np.interp(r0/2,rr,roots)\n \n phi0 = np.arctan2(x0,y0) - self.rot ####phi (i.e., azimuth) is reckoned with -pi corresponding to north, increasing clockwise, NOTE: pysolar use sub-standard definition\n phi0 = phi0%(2*np.pi)\n\n #####correction for the tilt of the camera\n k=np.array((np.sin(self.azm),np.cos(self.azm),0))\n a=np.array([np.sin(theta0)*np.cos(phi0),np.sin(theta0)*np.sin(phi0),np.cos(theta0)]); \n a = np.transpose(a,[1,2,0])\n b=np.cos(self.beta)*a + np.sin(self.beta)*np.cross(k,a,axisb=2) \\\n + np.reshape(np.outer(np.dot(a,k),k),(self.ndy0,self.ndx0,3))*(1-np.cos(self.beta))\n theta0=np.arctan(np.sqrt(b[:,:,0]**2+b[:,:,1]**2)/b[:,:,2])\n phi0=np.arctan2(b[:,:,1],b[:,:,0])%(2*np.pi)\n# max_theta *= deg2rad \n# valid0 = (theta0<max_theta) & (theta0>0); \n# theta0[valid0]=np.nan;\n self.theta0,self.phi0=theta0,phi0",
"def get_centre(self):\n # just get the centroid\n # perhaps try something like:\n # https://github.com/mapbox/polylabel/blob/master/polylabel.js\n # in the future\n coords = np.array([(n.x, n.y) for n in self.nodes])\n centre_x = coords[:, 0].mean()\n centre_y = coords[:, 1].mean()\n return centre_x, centre_y",
"def find_centroid_cell(self):\n\n x_min, y_min = self.find_min()\n x_max, y_max = self.find_max()\n x_centroid = int((x_max+x_min)/2)\n y_centroid = int((y_max+y_min)/2)\n centroide = x_centroid, y_centroid\n return centroide",
"def random_uniform_within_circle():\n rho = np.sqrt(np.random.uniform(0, 1))\n phi = np.random.uniform(0, 2 * np.pi)\n x = rho * np.cos(phi)\n y = rho * np.sin(phi)\n return np.array([x, y])",
"def get_x_y_from_center(center, angle):\n print \"center\", center\n size_of_img = (640, 480)\n alpha_x = angle + (center[1] - 0.5 * size_of_img[1]) * camera_y_angle / size_of_img[1] \n alpha_y = (center[0] - 0.5 * size_of_img[0]) * camera_x_angle / size_of_img[0] \n print \"angle y :\", alpha_y\n delta_x = height / math.tan(math.radians(alpha_x))\n d = math.sqrt(delta_x ** 2 + height ** 2)\n delta_y = d * math.sin(math.radians(alpha_y))\n return round(delta_x), round(delta_y)",
"def getCenter(self):\n return [self.tx/self.tw, self.ty/self.tw]",
"def _rad_center(self):\n return ((self.rad_hi + self.rad_lo) / 2).to(\"deg\")",
"def _calculate_actuators_across(self):\n \n self.unit_actuators = self.telescope_diameter / self.actuator_spacing",
"def PlayCentroid(self, actions, plr_coords):\n centroid_actions = []\n coords = plr_coords['b'] if self.id % 2 else plr_coords['r']\n if len(coords) == 0:\n x, y = 4.5, 4.5\n else:\n x, y = 0, 0\n for (r, c) in coords:\n x += r\n y += c\n x /= len(coords)\n y /= len(coords)\n for a in actions:\n if a['type'] == 'place' or a['type'] == 'remove':\n r, c = a['coords']\n if abs(r - x) <= 3 and abs(c - y) <= 3:\n centroid_actions.append(a)\n else:\n centroid_actions.append(a)\n if len(centroid_actions) > 0:\n actions = centroid_actions\n return actions",
"def arc(radius = 10, angle = 90, num_pts = 720):\n t = np.linspace(0, angle*np.pi/180, abs(int(num_pts*angle/360))-2)\n x = radius*np.cos(t)\n y = radius*np.sin(t)\n points = np.array((x,y)).T\n start_angle = 90*np.sign(angle)\n end_angle = start_angle + angle\n return points, start_angle, end_angle",
"def Intarea( xc, yc, r, x0, x1, y0, y1):\n\n#\n# Shift the objects so that the circle is at the origin.\n#\n x0 = x0 - xc\n y0 = y0 - yc\n x1 = x1 - xc\n y1 = y1 - yc\n\n return Oneside( x1, y0, y1, r ) + Oneside( y1, -x1, -x0, r ) +\\\n Oneside( -x0, -y1, -y0, r ) + Oneside( -y0, x0, x1, r )",
"def get_center_coords(antipodes):\n lat = antipodes[\"minx\"] + (antipodes[\"maxx\"] - antipodes[\"minx\"]) / 2\n lon = antipodes[\"miny\"] + (antipodes[\"maxy\"] - antipodes[\"miny\"]) / 2\n return [lon, lat]",
"def calculate_center_coordinates(self):\r\n coord_y = 320\r\n coord_x = -640\r\n distance_between_l = self.distance_between_layers()\r\n distance_between_n = self.distance_between_neurons()\r\n\r\n for layer in xrange(1, self.number_of_layers + 1):\r\n layer_data = []\r\n coord_x += distance_between_l\r\n\r\n for index_n, neuron in enumerate(xrange(1, self.number_of_neurons_in_layer + 1)):\r\n\r\n if index_n:\r\n coord_y -= distance_between_n\r\n else:\r\n coord_y = 320 # starting coordinates Y\r\n\r\n layer_data.append((coord_x, coord_y))\r\n\r\n self.central_coordinates[layer] = layer_data\r\n\r\n pprint(self.central_coordinates)\r\n self.calculate_outputs()",
"def centroids(img):\n _, _, _, centr = cv2.connectedComponentsWithStats(img)\n return centr[1:]",
"def _generate_coordinates(self):\n a0 = +0.2969\n a1 = -0.1260\n a2 = -0.3516\n a3 = +0.2843\n a4 = -0.1036 # zero thickness TE\n\n x = np.linspace(0.0, 1.0, num=self.n_points)\n\n if len(self.digits) == 4:\n # Returns n+1 points in [0 1] for the given 4-digits NACA string\n m = float(self.digits[0]) / 100.0\n p = float(self.digits[1]) / 10.0\n t = float(self.digits[2:]) / 100.0\n\n # half-thickness distribution\n yt = 5 * t * (a0 * np.sqrt(x) + a1 * x + a2 * np.power(x, 2) +\n a3 * np.power(x, 3) + a4 * np.power(x, 4))\n\n if p == 0:\n # Symmetric foil\n self.xup_coordinates = np.linspace(0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yt\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.ydown_coordinates = -yt\n else:\n # Cambered foil\n xc1 = np.asarray([xx for xx in x if xx <= p])\n xc2 = np.asarray([xx for xx in x if xx > p])\n yc1 = m / np.power(p, 2) * xc1 * (2 * p - xc1)\n yc2 = m / np.power(1 - p, 2) * (1 - 2 * p + xc2) * (1 - xc2)\n # Y-coordinates of camber line\n yc = np.append(yc1, yc2)\n\n if self.cosine_spacing:\n # points are generated according to cosine distribution of\n # the X-coordinates of the chord\n dyc1_dx = m / np.power(p, 2) * (2 * p - 2 * xc1)\n dyc2_dx = m / np.power(1 - p, 2) * (2 * p - 2 * xc2)\n dyc_dx = np.append(dyc1_dx, dyc2_dx)\n theta = np.arctan(dyc_dx)\n self.xup_coordinates = x - yt * np.sin(theta)\n self.yup_coordinates = yc + yt * np.cos(theta)\n self.xdown_coordinates = x + yt * np.sin(theta)\n self.ydown_coordinates = yc - yt * np.cos(theta)\n else:\n # Linear spacing distribution of the foil coordinates\n self.xup_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yc + yt\n self.ydown_coordinates = yc - yt\n\n elif len(self.digits) == 5:\n # Returns n+1 points in [0 1] for the given 5-digits NACA string\n cld = float(self.digits[0]) * 0.15\n p = 5.0 * float(self.digits[1]) / 100.0\n s = float(self.digits[2])\n t = float(self.digits[3:]) / 100.0\n\n # half-thickness distribution\n yt = 5 * t * (a0 * np.sqrt(x) + a1 * x + a2 * np.power(x, 2) +\n a3 * np.power(x, 3) + a4 * np.power(x, 4))\n\n if s == 1:\n # Relfex camber\n P = np.array([0.1, 0.15, 0.2, 0.25])\n M = np.array([0.13, 0.2170, 0.318, 0.441])\n K = np.array([51.99, 15.793, 6.520, 3.191])\n elif s == 0:\n # Standard camber\n P = np.array([0.05, 0.1, 0.15, 0.2, 0.25])\n M = np.array([0.0580, 0.1260, 0.2025, 0.2900, 0.3910])\n K = np.array([361.4, 51.64, 15.957, 6.643, 3.230])\n else:\n raise ValueError(\n 'For NACA \"LPSTT\" the value of \"S\" can be either 0 or 1.')\n\n if p == 0:\n # Symmetric foil\n self.xup_coordinates = np.linspace(0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yt\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.ydown_coordinates = -yt\n else:\n # Cambered foil\n spl_m = splrep(P, M)\n spl_k = splrep(M, K)\n m = splev(p, spl_m)\n k1 = splev(m, spl_k)\n xc1 = np.asarray([xx for xx in x if xx <= m])\n xc2 = np.asarray([xx for xx in x if xx > m])\n yc1 = k1 / 6.0 * (np.power(xc1, 3) - 3 * m * np.power(xc1, 2) +\n np.power(m, 2) * (3 - m) * xc1)\n yc2 = k1 / 6.0 * np.power(m, 3) * (1 - xc2)\n yc = np.append(yc1, yc2)\n\n if self.cosine_spacing:\n # points are generated according to cosine distribution of\n # the X-coordinates of the chord\n zc = cld / 0.3 * yc\n dyc1_dx = 1.0 / 6.0 * k1 * (\n 3 * np.power(xc1, 2) - 6 * m * xc1 + np.power(m, 2) *\n (3 - m))\n dyc2_dx = np.tile(-1.0 / 6.0 * k1 * 
np.power(m, 3),\n len(xc2))\n dyc_dx = np.append(dyc1_dx, dyc2_dx)\n theta = np.arctan(dyc_dx)\n self.xup_coordinates = x - yt * np.sin(theta)\n self.yup_coordinates = zc + yt * np.cos(theta)\n self.xdown_coordinates = x + yt * np.sin(theta)\n self.ydown_coordinates = zc - yt * np.cos(theta)\n else:\n # Linear spacing distribution of the foil coordinates\n self.xup_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yc + yt\n self.ydown_coordinates = yc - yt\n\n else:\n raise Exception",
"def find_center( contours ):\r\n ret = []\r\n\r\n for x in contours:\r\n M = cv2.moments( x )\r\n pt = Point()\r\n pt.x = int( M['m10']/M['m00'] )\r\n pt.y = int( M['m01']/M['m00'] )\r\n\r\n ret.append( pt )\r\n\r\n return( ret );",
"def centers_cartesian(self):\n polar_centers, azimuthal_centers = self.centers()\n x_centers, y_centers, z_centers = \\\n starwinds_magnetogram.coordinate_transforms.rectangular_coordinates_from_spherical(\n np.ones(polar_centers.shape),\n polar_centers,\n azimuthal_centers)\n\n return x_centers, y_centers, z_centers",
"def hough_circles_acc(img_orig, img_edges, radius, point_plus=True):\n\n '''\n if not point_plus:\n H = np.zeros_like(img_orig, dtype=np.uint8)\n r, c = H.shape\n edge_indices = np.nonzero(img_edges)\n for (row, col) in zip(*edge_indices):\n for theta in np.linspace(-math.pi, math.pi, math.ceil(radius*2*math.pi)+1):\n x_c = col + int(round(radius*math.cos(theta)))\n y_c = row + int(round(radius*math.sin(theta)))\n if x_c >= 0 and y_c >= 0 and x_c < c and y_c < r:\n H[y_c, x_c] += 1\n #else:\n return H\n '''\n H = np.zeros_like(img_orig, dtype=np.uint8)\n r, c = H.shape\n edge_indices = np.nonzero(img_edges)\n if not point_plus:\n for (row, col) in zip(*edge_indices):\n for theta in np.linspace(-math.pi, math.pi, math.ceil(radius*2*math.pi)+1):\n # Possible center of circle\n x_c = col + int(round(radius*math.cos(theta)))\n y_c = row + int(round(radius*math.sin(theta)))\n if x_c >= 0 and y_c >= 0 and x_c < c and y_c < r:\n H[y_c, x_c] += 1\n else:\n sobelx = cv2.Sobel(img_orig, cv2.CV_64F, 1, 0, ksize=5)\n sobely = cv2.Sobel(img_orig, cv2.CV_64F, 0, 1, ksize=5)\n for (row, col) in zip(*edge_indices):\n theta = math.atan2(sobely[row][col], sobelx[row][col])\n x_c = col + int(round(radius*math.cos(theta)))\n y_c = row + int(round(radius*math.sin(theta)))\n if x_c >= 0 and y_c >= 0 and x_c < c and y_c < r:\n H[y_c, x_c] += 1\n return H",
"def create_circle(radius=None, n_instance=None):\n del_theta = np.pi * 2/(n_instance)\n theta_list = np.linspace(0, np.pi * 2, n_instance)\n coordinates = [circular_movement(radius, theta) for theta in theta_list]\n return coordinates",
"def output(self):\n xpos, ypos = self.arcpoints[2]\n startxy = np.array([xpos, ypos]) # start point\n xpos, ypos = self.arcpoints[1]\n pointxy = np.array([xpos, ypos]) # a point on the curve\n xpos, ypos = self.arcpoints[0]\n endxy = np.array([xpos, ypos]) # end point\n\n a_norm = np.linalg.norm(endxy - pointxy)\n b_norm = np.linalg.norm(endxy - startxy)\n c_norm = np.linalg.norm(pointxy - startxy)\n \"\"\"\n s_factor = (a_norm + b_norm + c_norm) / 2\n radius = a_norm * b_norm * c_norm / 4\n / np.sqrt(s_factor * (s_factor - a_norm)\n * (s_factor - b_norm)\n * (s_factor - c_norm))\n \"\"\"\n b_factor1 = a_norm * a_norm * (b_norm * b_norm\n + c_norm * c_norm\n - a_norm * a_norm)\n b_factor2 = b_norm * b_norm * (a_norm * a_norm\n + c_norm * c_norm\n - b_norm * b_norm)\n b_factor3 = c_norm * c_norm * (a_norm * a_norm\n + b_norm * b_norm\n - c_norm * c_norm)\n centerxy = np.column_stack((startxy,\n pointxy,\n endxy)).dot(np.hstack((b_factor1,\n b_factor2,\n b_factor3)))\n centerxy /= b_factor1 + b_factor2 + b_factor3 # arc center\n\n self.def_field['XY_center'] = (centerxy)\n self.def_field['XY_arcpoints'].append(startxy) # start point\n self.def_field['XY_arcpoints'].append(endxy) # end point\n\n to_write = 'A '\n xpos, ypos = self.def_field['XY_center']\n\n to_write += str(int(xpos)) + ' ' + str(int(ypos)) + ' '\n to_write += str(self.def_field['radius']) + ' '\n to_write += str(self.def_field['angle1']) + ' '\n to_write += str(self.def_field['angle2']) + ' '\n to_write += str(self.def_field['unit']) + ' '\n to_write += str(self.def_field['convert']) + ' '\n to_write += str(self.def_field['width']) + ' '\n to_write += str(self.def_field['fill']) + ' '\n for xpos, ypos in self.def_field['XY_arcpoints']:\n to_write += str(self.offset[0] + xpos) + ' ' \\\n + str(self.offset[1] + ypos) + ' '\n to_write += '\\n'\n return to_write",
"def principal_axis(alpha_carbons):\n # alpha carbons coordinates as a numpy array\n coord = numpy.array(alpha_carbons, float)\n\n # get geometrical center\n center = numpy.mean(coord, 0)\n coord = coord - center\n\n # create inertia matrix and extract eigenvectors and values\n inertia = numpy.dot(coord.transpose(), coord)\n e_values, e_vectors = numpy.linalg.eig(inertia)\n\n # sort eigenvalues\n order = numpy.argsort(e_values)\n\n # axis1 is the principal axis with the greatest eigenvalue\n _, _, axis1 = e_vectors[:, order].transpose()\n\n axis_direction = axis1 / numpy.linalg.norm(axis1)\n\n return center, axis_direction",
"def get_center_coordinates(self):\n totalX = 0\n totalY = 0\n totalZ = 0\n for atom in self.get_atoms():\n totalX += atom.get_x()\n totalY += atom.get_y()\n totalZ += atom.get_z()\n \n xCenter = totalX / len(self.get_atoms())\n yCenter = totalY / len(self.get_atoms())\n zCenter = totalZ / len(self.get_atoms())\n \n return xCenter, yCenter, zCenter",
"def parametrized_circle(point_a, point_b, point_c, theta):\n radius, center = shortest_line_to_point(point_a, point_b, point_c)\n # print'center, radius \\n', center, radius\n center_axis = np.subtract(point_a, point_b)\n # print 'center axis %s , radius %s, center %s' % (center_axis, radius, center)\n # center_axis dot <1,1,z> = 0 returns perp vector\n in_plane = norm_vect(np.subtract(point_c, center))\n perp_1 = np.cross(center_axis, in_plane)\n perp_2 = np.cross(center_axis, perp_1)\n # print 'perp dick', perp_1, perp_2\n # norm perpendicular vectors\n perp_1 = norm_vect(perp_1)\n perp_2 = norm_vect(perp_2)\n if -1e-6 > np.dot(perp_1, perp_2) > 1e-6 or -1e-6 > (np.dot(perp_1, center_axis)) > 1e-6 or \\\n -1e-6 > np.dot(perp_2, center_axis) > 1e-6:\n print 'not perpendicular'\n # print np.dot(perp_1, perp_2), np.dot(perp_1, center_axis), np.dot(perp_2, center_axis)\n x = center[0] + (radius * math.cos(theta) * perp_2[0]) + (radius * math.sin(theta) * perp_1[0])\n y = center[1] + (radius * math.cos(theta) * perp_2[1]) + (radius * math.sin(theta) * perp_1[1])\n z = center[2] + (radius * math.cos(theta) * perp_2[2]) + (radius * math.sin(theta) * perp_1[2])\n return [x, y, z]",
"def ccsn_pixel_locations(size=100):\n\n L_UV_corr = UVimage_corrected()\n [x,y,width,height] = GordonSurveyArea()\n L_UV_corr_gord = L_UV_corr[y:y+height+1, x:x+width+1]\n xpix = np.arange(0,L_UV_corr_gord.shape[1]+1,1)\n ypix = np.arange(0,L_UV_corr_gord.shape[0]+1,1)\n\n#Calculate CDF per pixel\n p_UV = L_UV_corr_gord/np.sum(L_UV_corr_gord)\n cdf_UV = np.cumsum(p_UV).reshape(p_UV.shape)\n \n#Randomly select pixels\n snrs = np.random.random(size)\n xx,yy = np.meshgrid(xpix, ypix)\n loc_x = np.zeros_like(snrs)\n loc_y = np.zeros_like(snrs)\n for i,randcdf in enumerate(snrs):\n index = zip(*np.where(cdf_UV<=randcdf))\n loc_x[i] = xx[index[-1]]\n loc_y[i] = yy[index[-1]]\n \n return (loc_x, loc_y)",
"def _rect_to_cyl_coords(self, x, y):\n theta = (np.pi * y) / (self.arch_radius * 2)\n y = self.arch_radius * np.sin(theta)\n z = self.arch_radius * np.cos(theta)\n return np.array([x, y, z])",
"def perform_photometry_window_centroid(self,xcen,ycen,r_aper=34.,r_annulus1=60.,r_annulus2=90.,\n box_size=80.,method=\"howell\"):\n if (r_annulus1 < r_aper) ^ (r_annulus2 < r_aper):\n raise Exception, '(r_annulus1 < r_aper) ^ (r_annulus2 < r_aper)'\n \n box_size = int(box_size)\n self.new_xpos = np.zeros(len(xcen))\n self.new_ypos = np.zeros(len(ycen))\n \n # Recentroid on each of the reference star\n for i in range(len(xcen)):\n self.new_xpos[i], self.new_ypos[i] = self.get_centroid_cutout(xcen[i],ycen[i],box_size=box_size,method=method,plot=False,plot_full=False)\n \n # Save new centroids\n self.positions = np.vstack([self.new_xpos,self.new_ypos]).T\n \n # Create apertures\n self.apertures = CircularAperture2(self.positions,r=r_aper)\n self.annulus_apertures = CircularAnnulus2(self.positions, r_in=r_annulus1,r_out=r_annulus2)\n apers = [self.apertures, self.annulus_apertures]\n \n # Perform photometry\n self.phot_table = aperture_photometry(self.data,apers,method=\"exact\",subpixels=5)\n \n self.phot_table[\"Peak\"] = self.apertures.get_peak_in_aperture(self.data)\n self.data_cutouts_aper = self.apertures.get_data_cutouts(self.data)\n self.data_cutouts_annulus = self.annulus_apertures.get_data_cutouts(self.data)\n \n # \n bkg_mean = self.phot_table['aperture_sum_1'] / self.annulus_apertures.area()\n bkg_sum = bkg_mean * self.apertures.area()\n final_sum = self.phot_table['aperture_sum_0'] - bkg_sum\n \n self.phot_table[\"final_sum\"] = final_sum\n self.phot_table[\"bkg_mean\"] = bkg_mean\n \n # master dataframe\n df = pd.DataFrame(index=[1]) # only one row\n\n df[\"Label\"] = self.fimg.header[\"FILENAME\"]\n df[\"JD_UTC\"] = astropy.time.Time(self.fimg.header[\"DATE-OBS\"],format=\"isot\").jd + (self.fimg.header[\"EXPTIME\"]/2.)/(24.*3600.)\n df[\"JD_SOBS\"] = astropy.time.Time(self.fimg.header[\"DATE-OBS\"],format=\"isot\").jd\n df[\"AIRMASS\"] = self.fimg.header[\"AIRMASS\"]\n df[\"TELAZ\"] = self.fimg.header[\"TELAZ\"]\n df[\"TELALT\"] = self.fimg.header[\"TELAZ\"]\n df[\"TELROT\"] = self.fimg.header[\"TELROT\"]\n \n # Loop over the stars\n for i in range(len(xcen)):\n if i==0:\n TorC = \"T\"\n else:\n TorC = \"C\"\n df[\"X(FITS)_\"+TorC+str(i+1)] = self.phot_table[\"xcenter\"][i].value\n df[\"Y(FITS)_\"+TorC+str(i+1)] = self.phot_table[\"ycenter\"][i].value\n df[\"Sky/Pixel_\"+TorC+str(i+1)] = self.phot_table[\"bkg_mean\"][i]\n df[\"Source-Sky_\"+TorC+str(i+1)]= self.phot_table[\"final_sum\"][i]\n df[\"Peak_\"+TorC+str(i+1)] = self.phot_table[\"Peak\"][i]\n \n self.source_sky_C_keys = [key for key in df.keys() if \"Source-Sky_C\" in key]\n self.sky_pixel_keys = [key for key in df.keys() if \"Sky/Pixel\" in key]\n df[\"tot_C_cnts\"] = df[self.source_sky_C_keys].sum().sum()\n df[\"rel_flux_T1\"]= df[\"Source-Sky_T1\"]/df[\"tot_C_cnts\"]\n \n for i in range(len(xcen)):\n if i==0:\n TorC = \"T\"\n else:\n TorC = \"C\"\n df[\"rel_flux_\"+TorC+str(i+1)]=df[\"Source-Sky_\"+TorC+str(i+1)]/df[\"tot_C_cnts\"]\n \n peak_flux = np.max(df[[key for key in df.keys() if \"Peak\" in key]].values)\n if peak_flux > self.saturation_warning_threshold:\n df[\"Saturated\"] = peak_flux\n else:\n df[\"Saturated\"] = 0.\n return df",
"def all(self, x, y, Rs, rho0, r200=100, center_x=0, center_y=0, angle=False):\n if angle is True:\n rho0_input = self.alpha2rho0(phi_E=rho0, Rs=Rs)\n else:\n rho0_input = rho0\n if Rs < 0.0001:\n Rs = 0.0001\n x_ = x - center_x\n y_ = y - center_y\n R = np.sqrt(x_**2 + y_**2)\n f_ = self.nfwPot(R, Rs, rho0_input, r200)\n f_x, f_y = self.nfwAlpha(R, Rs, rho0_input, r200, x_, y_)\n kappa = self.nfw2D(R, Rs, rho0_input, r200)\n gamma1, gamma2 = self.nfwGamma(R, Rs, rho0_input, r200, x_, y_)\n f_xx = kappa + gamma1\n f_yy = kappa - gamma1\n f_xy = gamma2\n return f_, f_x, f_y, f_xx, f_yy, f_xy",
"def getCircleCircumscribed(self):\n p1, p2, p3 = self.points\n a1 = - (p2.x - p1.x) / (p2.y - p1.y)\n b1 = (p2.x ** 2 - p1.x ** 2 + p2.y ** 2 - p1.y ** 2) / (2 * (p2.y - p1.y))\n a2 = - (p3.x - p2.x) / (p3.y - p2.y)\n b2 = (p3.x ** 2 - p2.x ** 2 + p3.y ** 2 - p2.y ** 2) / (2 * (p3.y - p2.y))\n x = (b1 - b2) / (a2 - a1)\n y = a1 * x + b1\n radius = math.hypot(p1.x - x, p1.y - y)\n return Circle(x, y, radius=radius)",
"def centers(self):\n def make_centers(x):\n return 0.25 * (x[:-1, :-1] + x[:-1, 1:] + x[1:, :-1] + x[1:, 1:])\n\n polar_centers = make_centers(self.polar_corners)\n azimuthal_centers = make_centers(self.azimuthal_corners)\n\n assert azimuthal_centers.shape == polar_centers.shape\n return polar_centers, azimuthal_centers",
"def circle_from_points(a, b, c):\n ab = subtract_vectors(b, a)\n cb = subtract_vectors(b, c)\n ba = subtract_vectors(a, b)\n ca = subtract_vectors(a, c)\n ac = subtract_vectors(c, a)\n bc = subtract_vectors(c, b)\n normal = normalize_vector(cross_vectors(ab, ac))\n d = 2 * length_vector_sqrd(cross_vectors(ba, cb))\n A = length_vector_sqrd(cb) * dot_vectors(ba, ca) / d\n B = length_vector_sqrd(ca) * dot_vectors(ab, cb) / d\n C = length_vector_sqrd(ba) * dot_vectors(ac, bc) / d\n Aa = scale_vector(a, A)\n Bb = scale_vector(b, B)\n Cc = scale_vector(c, C)\n center = add_vectorlist([Aa, Bb, Cc])\n radius = distance_point_point(center, a)\n return center, radius, normal",
"def get_polar_coordinates(cup_position, bot_position):\n\n distance_x = cup_position[0] - bot_position[0]\n distance_y = cup_position[1] - bot_position[1]\n\n r = math.hypot(distance_x, distance_y)\n theta = math.degrees(math.atan(distance_y/distance_x))\n\n return r, theta",
"def find_center(self):\n return(Point(self.corner.x + self.width/2.0, self.corner.y + self.height/2.0))",
"def Calc_axe_spheroid(r,c):\n return np.sqrt((r**3)/c)",
"def get_center_of_mass_allies(self,obs):",
"def center(self):\n\n ca_atoms = self.ca_atoms\n ca_atom_vectors = ca_atoms[\"ca.atom\"].to_list()\n ca_atom_vectors = [i for i in ca_atom_vectors if i is not None]\n centroid = self.center_of_mass(ca_atom_vectors, geometric=False)\n centroid = Vector(centroid)\n\n return centroid",
"def Arc( x, y0, y1, r):\n return 0.5 * r*r * ( np.arctan( (y1).astype(float)/(x).astype(float) ) - np.arctan( (y0).astype(float)/(x).astype(float) ) )",
"def cells_centroid_py(self):\n A=self.cells_area()\n cxy=np.zeros( (self.Ncells(),2), np.float64)\n\n refs=self.nodes['x'][self.cells['nodes'][:,0]]\n\n all_pnts=self.nodes['x'][self.cells['nodes']] - refs[:,None,:]\n\n for c in np.nonzero(~self.cells['deleted'])[0]:\n nodes=self.cell_to_nodes(c)\n\n i=np.arange(len(nodes))\n ip1=(i+1)%len(nodes)\n nA=all_pnts[c,i]\n nB=all_pnts[c,ip1]\n\n tmp=(nA[:,0]*nB[:,1] - nB[:,0]*nA[:,1])\n cxy[c,0] = ( (nA[:,0]+nB[:,0])*tmp).sum()\n cxy[c,1] = ( (nA[:,1]+nB[:,1])*tmp).sum()\n cxy /= 6*A[:,None] \n cxy += refs\n return cxy",
"def prada(self):\n scale_factor = 1.0 / (1.0 + self.snapshot.header.redshift)\n r200c_physical = self.r200c * scale_factor / 1000.0 # units Mpc\n\n v200 = (\n (self.snapshot.const.G * self.m200c)\n / r200c_physical\n * self.snapshot.const.Mpc ** 2\n / 1000.0 ** 2\n ) ** 0.5 # units km/s\n\n def y(x, vmax, v200):\n func = np.log(1 + x) - (x / (1 + x))\n return ((0.216 * x) / func) ** 0.5 - (vmax / v200)\n\n concentration = np.zeros((len(self.vmax)))\n for halo in range(self.N_halos):\n if v200[halo] > self.vmax[halo]:\n concentration[halo] = -9999.0\n else:\n try:\n concentration[halo] = newton(\n y, x0=5.0, args=(self.vmax[halo], v200[halo])\n )\n except:\n concentration[halo] = -9999.0\n\n return concentration",
"def center(self):\n return self.center_x, self.center_y",
"def get_pixel_rad_theta_vals(pix_x, pix_y, center_x, center_y):\n relative_x = pix_x - center_x\n relative_y = pix_y - center_y\n\n radius = (relative_x ** 0.5 + relative_y ** 0.5) ** 0.5\n\n if relative_x == 0:\n theta = 0\n else:\n theta = math.atan(abs(relative_y / relative_x))\n\n if relative_x < 0 < relative_y:\n theta += math.pi / 2\n elif relative_x < 0 and relative_y < 0:\n theta += math.pi\n elif relative_x > 0 > relative_y:\n theta += math.pi * 1.5\n\n return radius, theta",
"def calculate_centers_of_mass(x_all, y_all):\n num_of_frames, num_of_rafts = x_all.shape\n\n x_centers = x_all[:, 0:num_of_rafts].mean(axis=1)\n y_centers = y_all[:, 0:num_of_rafts].mean(axis=1)\n\n x_relative_to_centers = x_all - x_centers[:, np.newaxis]\n y_relative_to_centers = y_all - y_centers[:, np.newaxis]\n\n distances_to_centers = np.sqrt(x_relative_to_centers ** 2 + y_relative_to_centers ** 2)\n\n orbiting_angles = np.arctan2(y_relative_to_centers, x_relative_to_centers) * 180 / np.pi\n\n return distances_to_centers, orbiting_angles, x_centers, y_centers",
"def calc_R(x: npt.NDArray, y: npt.NDArray, xc: float, yc: float) -> npt.NDArray:\n return np.sqrt((x - xc) ** 2 + (y - yc) ** 2)",
"def get_component_centers(atoms, unwrap=False):\n if unwrap:\n atoms = unwrap_atoms_from_cell(atoms)\n pos = atoms.get_positions()\n masses = atoms.get_masses()\n components = get_connected_components(pos, atoms.get_chemical_symbols())\n centers = []\n for c in components:\n centers.append(np.dot(masses[c], pos[c]) / masses[c].sum())\n return np.array(centers)"
] | [
"0.6132837",
"0.5954457",
"0.5869159",
"0.5858206",
"0.5853505",
"0.5796136",
"0.57292473",
"0.5709713",
"0.5697524",
"0.5693983",
"0.56876165",
"0.56802106",
"0.56581277",
"0.56567794",
"0.5652664",
"0.5648251",
"0.56471676",
"0.5623604",
"0.561589",
"0.56110007",
"0.5595283",
"0.5580179",
"0.5579294",
"0.55617416",
"0.5560451",
"0.55415034",
"0.55311775",
"0.5514491",
"0.550758",
"0.54843956",
"0.5479477",
"0.54732",
"0.54703695",
"0.54663676",
"0.5445479",
"0.5443282",
"0.544106",
"0.5431692",
"0.5428144",
"0.5419287",
"0.54154044",
"0.53999454",
"0.539369",
"0.5382064",
"0.53814733",
"0.5372419",
"0.53721935",
"0.5371645",
"0.536304",
"0.53598326",
"0.5333784",
"0.53316706",
"0.53196126",
"0.5316284",
"0.5310257",
"0.53075206",
"0.53009325",
"0.52998525",
"0.52818793",
"0.52761114",
"0.52687716",
"0.52634674",
"0.5258894",
"0.5258892",
"0.5258849",
"0.5242651",
"0.52403766",
"0.52398056",
"0.52382165",
"0.52361935",
"0.5235959",
"0.5226184",
"0.52238727",
"0.5221976",
"0.5218752",
"0.5210694",
"0.52071637",
"0.52057284",
"0.5205551",
"0.51945335",
"0.51944476",
"0.5186301",
"0.5181734",
"0.5181523",
"0.5178804",
"0.51781577",
"0.51760566",
"0.51743585",
"0.5172376",
"0.5165838",
"0.51535743",
"0.51534545",
"0.51491493",
"0.51443344",
"0.5144275",
"0.51437247",
"0.51286167",
"0.5125901",
"0.5119263",
"0.51086634"
] | 0.69552195 | 0 |
Compute the PEAK of the PSF without aberrations so that we can normalize everything by it | def peak_PSF(self):
im, strehl = self.compute_PSF(np.zeros(self.N_act))
return strehl | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pulp_smash():",
"def peak_PSF(self):\n return self.compute_PSF(np.zeros(self.N_zern))",
"def cal_pn(grams_set, grams, candidate, reference):\n count = 0\n for gram in grams_set:\n # print(gram)\n count += count_clip(gram, grams, reference)\n # calculate log() for p, so '+10**-8' avoid 'p==0'\n p = count / len(grams) + 10**-8 \n return p",
"def f_UPPS_pc(v, P_0, r_f, d, s, T, wealth, phi, n_s, n_o, K):\n W_T = f_W_T_pc(v, P_0, r_f, d, s, T, wealth, phi, n_s, n_o, K)\n value = pow(W_T, -gamma) * f_W_T_to_P_T_pc(v, P_0, r_f, d, s, T, wealth, phi, n_s, n_o, K) * f_P_T_to_P_0(v, r_f, d, s, T)\n return value",
"def precompute_scoring():\n global volume_void_inclusion\n global attract_point_distances\n global perlin_values\n \n volume_void_inclusion = []\n for i,void in enumerate(volumes_void):\n inclusion = gh.PointInBrep(void,points_input,False)\n volume_void_inclusion.append(inclusion)\n \n attract_point_distances = []\n for i,point in enumerate(points_attractor):\n distances = gh.Division(gh.Distance(point,points_input),max_dist)\n attract_point_distances.append(distances)",
"def _get_single_PSF(SCA, bandpass, SCA_pos, pupil_bin,\n n_waves, extra_aberrations, wavelength,\n pupil_plane_type, gsparams):\n from .. import OpticalPSF, ChromaticOpticalPSF\n from . import diameter\n from ..bandpass import Bandpass\n from .roman_bandpass import getBandpasses\n\n if wavelength is None:\n wave = zemax_wavelength\n elif isinstance(wavelength, Bandpass):\n wave = wavelength = wavelength.effective_wavelength\n else:\n wave = wavelength\n\n # All parameters relevant to the aperture. We may be able to use a cached version.\n aper = _make_aperture(SCA, pupil_plane_type, pupil_bin, wave, gsparams)\n\n # Start reading in the aberrations for that SCA\n aberrations, x_pos, y_pos = _read_aberrations(SCA)\n # Do bilinear interpolation, unless we're exactly at the center (default).\n use_aberrations = _interp_aberrations_bilinear(aberrations, x_pos, y_pos, SCA_pos)\n\n if extra_aberrations is not None:\n use_aberrations[:len(extra_aberrations)] += extra_aberrations\n # We don't want to use piston, tip, or tilt aberrations. The former doesn't affect the\n # appearance of the PSF, and the latter cause centroid shifts. So, we set the first 4\n # numbers (corresponding to a place-holder, piston, tip, and tilt) to zero.\n use_aberrations[0:4] = 0.\n\n # Now set up the PSF, including the option to interpolate over waves\n if wavelength is None:\n PSF = ChromaticOpticalPSF(lam=zemax_wavelength,\n diam=diameter, aberrations=use_aberrations,\n aper=aper, gsparams=gsparams)\n if n_waves is not None:\n # To decide the range of wavelengths to use, check the bandpass.\n bp_dict = getBandpasses()\n bp = bp_dict[bandpass]\n PSF = PSF.interpolate(waves=np.linspace(bp.blue_limit, bp.red_limit, n_waves),\n oversample_fac=1.5)\n else:\n tmp_aberrations = use_aberrations * zemax_wavelength / wavelength\n PSF = OpticalPSF(lam=wavelength, diam=diameter,\n aberrations=tmp_aberrations,\n aper=aper, gsparams=gsparams)\n\n return PSF",
"def test_nuke_psfs():\n # Without multiprocessing\n mt.nuke_psfs(mprocessing=False)\n\n # With multiprocessing\n mt.nuke_psfs()",
"def calculate_gmpe(src_keys, station, output_file, rrups, gmpe_group_name):\n gmpe_group = gmpe_config.GMPES[gmpe_group_name]\n origin = (src_keys['lon_top_center'], src_keys['lat_top_center'])\n dims = (src_keys['fault_length'], src_keys['dlen'],\n src_keys['fault_width'], src_keys['dwid'],\n src_keys['depth_to_top'])\n mech = (src_keys['strike'], src_keys['dip'], src_keys['rake'])\n\n # Station location\n site_geom = [float(station.lon), float(station.lat), 0.0]\n (fault_trace1, upper_seis_depth,\n lower_seis_depth, ave_dip,\n dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)\n rjb, rrup, rx = putils.DistanceToSimpleFaultSurface(site_geom,\n fault_trace1,\n upper_seis_depth,\n lower_seis_depth,\n ave_dip)\n\n print \"station: %s, Rrup: %f\" % (station.scode, rrup)\n rrups.append(rrup)\n\n vs30 = 1000\n z10 = None # Let PyNGA calculate it\n z25 = None # Let PyNGA calculate it\n\n # Compute PSA for this stations\n station_median = []\n for period in gmpe_group[\"periods\"]:\n period_medians = []\n for nga_model in gmpe_group[\"models\"]:\n median = gmpe_config.calculate_gmpe(gmpe_group_name,\n nga_model,\n src_keys['magnitude'],\n rjb, vs30,\n period,\n rake=src_keys['rake'],\n dip=src_keys['dip'],\n W=src_keys['fault_width'],\n Ztor=src_keys['depth_to_top'],\n Rrup=rrup, Rx=rx,\n Z10=z10, Z25=z25)\n period_medians.append(median)\n station_median.append((period, period_medians))\n\n # Create label\n file_label = \"\"\n for nga_model in gmpe_group[\"models\"]:\n file_label = \"%s %s\" % (file_label, nga_model)\n # Output data to file\n outfile = open(output_file, 'w')\n outfile.write(\"#station: %s\\n\" % (station.scode))\n outfile.write(\"#period%s\\n\" % (file_label))\n for item in station_median:\n period = item[0]\n vals = item[1]\n out_str = \"%.4f\" % (period)\n for method in vals:\n out_str = out_str + \"\\t%.6f\" % (method)\n outfile.write(\"%s\\n\" % (out_str))\n outfile.close()\n\n # Return list\n return station_median",
"def ppf(self,x):\n # TODO speed this up by doing it in Crow, not in python\n if hasattr(x,'__len__'):\n returnPpf = np.array([self.ppf(i) for i in x])\n else:\n returnPpf = self._distribution.inverseCdf(x)\n return returnPpf",
"def ppf(self,x):\n return self.categoricalDist.ppf(x)",
"def compute_PSSM_self_information(p):\n return -sp.sum(p*sp.log(p))",
"def compute_PSF(self, coef, crop=True):\n\n phase = np.dot(self.RBF_mat, coef) + self.defocus\n\n pupil_function = self.pupil_mask * np.exp(2 * np.pi * 1j * phase)\n image = (np.abs(fftshift(fft2(pupil_function))))**2\n\n try:\n image /= self.PEAK\n\n except AttributeError:\n # If self.PEAK is not defined, self.compute_PSF will compute the peak\n pass\n\n strehl = np.max(image)\n\n if crop:\n image = image[self.minPix:self.maxPix, self.minPix:self.maxPix]\n else:\n pass\n return image, strehl",
"def calc_prob_local(self, *args):\n return 0",
"def getpval(teststat, statlist):\n \n propzero = 0\n bootvals = []\n for val in statlist:\n if val == 0:\n propzero += 1\n else:\n bootvals.append(val)\n \n propzero = float(propzero) / len(statlist)\n \n shapeinit = getstartingshape(statlist)\n \n shape = optimiselike(getlikeweibull, bootvals, shapeinit)\n scale = (sum(bootvals) / len(bootvals)) / scipy.special.gamma(1 + 1/shape)\n \n pvalue = math.exp(- (teststat/scale) ** shape)\n \n return pvalue * (1 - propzero)",
"def do_pnp(pts3d_for_pnp, pts2d_for_pnp, K, iterations=200, reprojThresh=5):\n list_pts3d_for_pnp = pts3d_for_pnp\n list_pts2d_for_pnp = pts2d_for_pnp\n pts3d_for_pnp = np.array(pts3d_for_pnp)\n # pts2d_for_pnp = np.expand_dims(np.squeeze(np.array(pts2d_for_pnp)), axis=1)\n # print(pts3d_for_pnp)\n # print(pts2d_for_pnp.shape)\n num_pts = len(pts3d_for_pnp)\n print(num_pts)\n highest_inliers = 0\n for j in range(iterations):\n pt_idxs = np.random.choice(num_pts, 6, replace=False)\n pts3 = np.array([pts3d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n # print(\"pts\",pts3)\n pts2 = np.array([pts2d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n _, rvec, tvec = cv2.solvePnP(pts3, pts2, K, distCoeffs=np.array([]), flags=cv2.SOLVEPNP_ITERATIVE)\n R, _ = cv2.Rodrigues(rvec)\n pnp_errors, projpts, avg_err, perc_inliers = test_reproj_pnp_points(list_pts3d_for_pnp, list_pts2d_for_pnp, R, tvec, K, rep_thresh=reprojThresh)\n if highest_inliers < perc_inliers:\n highest_inliers = perc_inliers\n best_R = R\n best_tvec = tvec\n R = best_R\n tvec = best_tvec\n # print('rvec:', rvec,'\\n\\ntvec:', tvec)\n print(\"avg\",avg_err)\n print(\"inlier\",perc_inliers)\n return R, tvec",
"def find_mpe(fbn, sbn, compat, beta, e):\n evars = set(e)\n freevars = [v for v in fbn.V if v.name not in evars]\n\n # para instanaciar las variables splitted primero. Ver popsition 1\n # del paper\n freevars.sort(key=lambda x: x.name in compat) \n \n t = datetime.now()\n ac = dnnf.todnnf(sbn)\n print datetime.now() - t\n print \"dfs\", freevars\n def dfs(q, varsleft, z, k):\n \"\"\"\n q: cota actual\n varsleft: variables que faltan por instanciar. Se sacan del final.\n z: instanciacion parcial actual\n k: numero de variables splitted que falta por instanciar\n \"\"\"\n var = varsleft.pop()\n varname = var.name\n domain = var.Domain\n k -= 1\n clones = []\n if varname in compat:\n for clone in compat[varname]:\n clones.append(clone)\n\n # probar todos sus posibles valores\n for value in domain:\n # agregar ese valor a la instancia parcial\n z[varname] = value\n for clone in clones:\n z[clone] = value\n p = ac.mpe(z)\n\n if varsleft:\n # si todavia quedan variables por asignar\n # hacer prune si podemos\n \n if k<=0:\n # ya todas las variables splitted estan\n # asignadas. Ahora el MPE(sbn) = MPE(fbn), no hace\n # falta hacer mas asignaciones para obtener el\n # valor exacto (Proposicion 1 del paper)\n q = max(q, beta*p)\n else:\n if p*beta <= q:\n # la cota superior sobre sbc es menor que la\n # cota inferior q que llevamos. Por aqui no\n # hay nada mejor\n continue\n else:\n # todavia puede haber algo bueno por aqui\n q = max(q, dfs(q, varsleft, z, k))\n else:\n # si no queda ninguna variable por asignar.\n # por un teorema, el MPE(fbn, x) == beta*MPE(sbn, x)\n q = max(q, beta*p)\n\n # regresar todo al estado orignal\n varsleft.append(var)\n del z[varname]\n for clone in clones:\n del z[clone]\n return q\n\n return dfs(0.0, freevars, e, len(compat))",
"def test_AFQ_pft():\n _, bids_path, sub_path = get_temp_hardi()\n\n bundle_names = [\"SLF\", \"ARC\", \"CST\", \"FP\"]\n\n f_pve_csf, f_pve_gm, f_pve_wm = get_fnames('stanford_pve_maps')\n os.rename(f_pve_wm, op.join(sub_path, \"sub-01_ses-01_WMprobseg.nii.gz\"))\n os.rename(f_pve_gm, op.join(sub_path, \"sub-01_ses-01_GMprobseg.nii.gz\"))\n os.rename(f_pve_csf, op.join(sub_path, \"sub-01_ses-01_CSFprobseg.nii.gz\"))\n\n stop_mask = PFTMask(\n MaskFile(\"WMprobseg\"),\n MaskFile(\"GMprobseg\"),\n MaskFile(\"CSFprobseg\"))\n\n my_afq = api.AFQ(\n bids_path,\n dmriprep='vistasoft',\n bundle_info=bundle_names,\n tracking_params={\n \"stop_mask\": stop_mask,\n \"stop_threshold\": \"CMC\",\n \"tracker\": \"pft\"\n })\n my_afq.export_streamlines()",
"def ps(image):\n\timage = image.astype(float)\n\tps_img = abs(pow(fft2(image), 2))\n\treturn ps_img",
"def calculate_precinct_score(pt, dstrct):\n return pt.F(dstrct)",
"def calculate_prp(self, ref_point: np.ndarray, f_current: np.ndarray) -> np.ndarray:\n\n # distance\n d = np.linalg.norm(np.atleast_2d(ref_point - f_current))\n\n # unit vectors\n ei = np.array([np.zeros(len(ref_point))])\n es = np.repeat(ei, len(ref_point), axis=0)\n\n for i, j in enumerate(es):\n for ind, _ in enumerate(j):\n if ind == i:\n j[ind] = 1\n\n return ref_point + (d * es)",
"def pdb2pka_sugelm(self):\n import Protool\n P=Protool.structureIO()\n P.readpdb(self.pdbfile)\n P.RemoveALT()\n #import Protool.mutate\n #MUT=Protool.mutate.Mutate(P)\n #\n # Construct arrays\n #\n import pKD_dict\n self.data=pKD_dict.pKD_dict()\n self.atom_data=pKD_dict.pKD_dict()\n #\n # Create dir for mutant PDB files\n #\n import os\n mutdir=os.path.join(self.topdir,self.pdbfile+'.pdbs')\n if not os.path.isdir(mutdir):\n os.mkdir(mutdir)\n #\n # Loop over all residues\n #\n residues=P.residues.keys()\n residues.sort()\n for residue in residues:\n orgres=P.resname(residue)\n print 'Calculating for %s %s' %(residue,P.resname(residue))\n #\n # If neutral mutate to Asp, Glu, Lys, Arg, His\n #\n targets=[]\n for res in ['ARG','LYS','HIS','ASP','GLU']:\n if P.resname(residue)!=res:\n targets.append(res)\n #if orgres=='GLU':\n # targets.append('GLN')\n #elif orgres=='ASP':\n # targets.append('ASN')\n #elif orgres=='HIS':\n # targets.append('PHE')\n #elif orgres=='ARG' or P.resname(residue)=='LYS':\n # targets.append('MET')\n #\n # Target identified. Now model each\n #\n for target in targets:\n import pKD_tools\n resid=pKD_tools.get_resid_from_res(residue)\n orgres=P.resname(residue)\n filename=os.path.join(mutdir,'%s:%s:%s.pdb' %(residue,orgres,target))\n mutation='%s:%s:%s' %(residue,orgres,target)\n if not os.path.isfile(filename):\n import Design_pKa_help\n Design_pKa_help.make_mutation(self.pdbfile,mutation)\n NP=Protool.structureIO()\n NP.readpdb(filename)\n NP.writepdb(filename,TER=None)\n #\n # Calculate the interaction energies\n #\n protein,routines,forcefield,apbs_setup,lig_titgrps = pdb2pka.pre_init(pdbfilename=filename,\n ff='parse',\n ligand=None,\n verbose=1)\n mypkaRoutines = pdb2pka.pKaRoutines(protein, routines, forcefield,apbs_setup)\n #\n # Find our group\n #\n sp=residue.split(':')\n chainid=sp[0]\n resnum=int(sp[1])\n mypkaRoutines.findTitratableGroups()\n this_pKa=None\n for pKa in mypkaRoutines.pKas:\n print pKa.residue.resSeq,resnum\n print pKa.residue.chainID,chainid\n print pKa.residue.name,target\n print pKa.pKaGroup.name,target\n print '--------------'\n print 'ChainID',pKa.residue.chainID\n if pKa.residue.resSeq==resnum and pKa.residue.chainID==chainid and pKa.residue.name==target and pKa.pKaGroup.name==target:\n #print 'Found group',pKa.residue.resSeq,pKa.pKaGroup.name\n this_pKa=pKa\n break\n if not this_pKa:\n raise Exception,'Could not find inserted titratable group'\n mypkaRoutines.get_interaction_energies_setup(this_pKa,mode='pKD')\n matrix=mypkaRoutines.matrix\n #\n # Dig the interaction energies out of the pdb2pka array\n #\n for titration1 in matrix[this_pKa].keys():\n for state1 in matrix[this_pKa][titration1].keys():\n grp_sub=matrix[this_pKa][titration1][state1]\n if mypkaRoutines.is_charged(this_pKa,titration1,state1):\n for pKa2 in grp_sub.keys(): \n import string\n chainID2=pKa.residue.chainID\n resid2='%s:%s' %(chainID2,string.zfill(pKa2.residue.resSeq,4))\n for titration2 in grp_sub[pKa2].keys():\n for state2 in grp_sub[pKa2][titration2].keys():\n if mypkaRoutines.is_charged(pKa2,titration2,state2):\n #\n # Both states are charged, so now we can pull the\n # interaction energies out\n #\n if not self.data.has_key(mutation):\n self.data[mutation]={}\n self.data[mutation][resid2]=grp_sub[pKa2][titration2][state2]\n #\n # Get the potentials at all atoms too\n #\n all_pots=mypkaRoutines.all_potentials[this_pKa][titration1][state1]\n sub_all_pots=all_pots[pKa2][titration2][state2]\n for atom in sub_all_pots.keys():\n resid=mutation\n import 
pKD_tools\n resid2=pKD_tools.get_resid_from_res(atom)\n atomname=atom.split(':')[-1] #atom.name\n if atomname[0]=='H' or atomname in ['N','C','O']:\n continue # Skip all H atoms and all non-CA backbone atoms to save memory\n if not self.atom_data.has_key(resid):\n self.atom_data[resid]={}\n if not self.atom_data[resid].has_key(resid2):\n self.atom_data[resid][resid2]={}\n self.atom_data[resid][resid2][atomname]=abs(sub_all_pots[atom])\n return self.data,self.atom_data",
"def compute_pvalue(self):\n # Run permutation test\n self.PermutationTest()\n # TS obtained from the original B,T samples\n self.compute_obs_TS()\n \n # Mean and std of the TS distribution\n self.mu = np.mean(self.TS_tilde)\n self.sigma = np.std(self.TS_tilde)\n \n # Standardized test statistic (zero mean, unit variance)\n self.TS_prime = (self.TS_tilde - self.mu)/self.sigma\n self.TS_prime_obs = (self.TS_obs - self.mu)/self.sigma\n \n # Two-sided p-value from TS' distribution\n self.p_value = 2*(1 - 0.01 * stats.percentileofscore(self.TS_prime,\n abs(self.TS_prime_obs)) )\n \n # if 0, compute it from standard normal\n if self.p_value == 0.0:\n self.p_value = self.pvalue_gaussian()\n \n print(\"\")\n print(\"p-value = {:e}\".format(self.p_value))",
"def __init__(self, peak, pki, parent, multiColorPeak=None):\n # Peak object\n self.peak = peak\n # int, peak index number\n self.pki = pki\n self.parent = parent\n self.multiColorPeak = multiColorPeak\n # union of all the ways of failing...\n self.skip = False\n\n self.outOfBounds = False\n self.tinyFootprint = False\n self.noValidPixels = False\n self.deblendedAsPsf = False\n self.degenerate = False\n\n # Field set during _fitPsf:\n self.psfFitFailed = False\n self.psfFitBadDof = False\n # (chisq, dof) for PSF fit without decenter\n self.psfFit1 = None\n # (chisq, dof) for PSF fit with decenter\n self.psfFit2 = None\n # (chisq, dof) for PSF fit after applying decenter\n self.psfFit3 = None\n # decentered PSF fit wanted to move the center too much\n self.psfFitBigDecenter = False\n # was the fit with decenter better?\n self.psfFitWithDecenter = False\n #\n self.psfFitR0 = None\n self.psfFitR1 = None\n self.psfFitStampExtent = None\n self.psfFitCenter = None\n self.psfFitBest = None\n self.psfFitParams = None\n self.psfFitFlux = None\n self.psfFitNOthers = None\n\n # Things only set in _fitPsf when debugging is turned on:\n self.psfFitDebugPsf0Img = None\n self.psfFitDebugPsfImg = None\n self.psfFitDebugPsfDerivImg = None\n self.psfFitDebugPsfModel = None\n\n self.failedSymmetricTemplate = False\n\n # The actual template Image and Footprint\n self.templateImage = None\n self.templateFootprint = None\n\n # The flux assigned to this template -- a MaskedImage\n self.fluxPortion = None\n\n # The stray flux assigned to this template (may be None), a HeavyFootprint\n self.strayFlux = None\n\n self.hasRampedTemplate = False\n\n self.patched = False\n\n # debug -- a copy of the original symmetric template\n self.origTemplate = None\n self.origFootprint = None\n # MaskedImage\n self.rampedTemplate = None\n # MaskedImage\n self.medianFilteredTemplate = None\n\n # when least-squares fitting templates, the template weight.\n self.templateWeight = 1.0",
"def _paa(self):\n self.paa = np.array([self.series[i * self.points_per_symbol : (i + 1) * self.points_per_symbol].mean() for i in range(len(self.series) / self.points_per_symbol)])",
"def calculate_p(candidate, reference):\n matches = 0\n for grama in candidate:\n if grama in reference:\n matches += 1\n return matches/len(candidate)",
"def get_fpp(self, i_seq):\n if (not self.use_fdp[i_seq]):\n return None\n if (self.fpp_from_phaser_ax_sites is None):\n return self.site_fdp[i_seq]\n fpp = self.fpp_from_phaser_ax_sites[i_seq]\n if (fpp < 0) : fpp = None\n return fpp",
"def do_pnp(pts3d_for_pnp, pts2d_for_pnp, K, iterations=200, reprojThresh=5):\n list_pts3d_for_pnp = pts3d_for_pnp\n list_pts2d_for_pnp = pts2d_for_pnp\n pts3d_for_pnp = np.squeeze(np.array(pts3d_for_pnp))\n pts2d_for_pnp = np.expand_dims(np.squeeze(np.array(pts2d_for_pnp)), axis=1)\n num_pts = len(pts3d_for_pnp)\n\n highest_inliers = 0\n for i in range(iterations):\n pt_idxs = np.random.choice(num_pts, 6, replace=False)\n pts3 = np.array([pts3d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n pts2 = np.array([pts2d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n _, rvec, tvec = cv2.solvePnP(pts3, pts2, K, distCoeffs=np.array([]), flags=cv2.SOLVEPNP_ITERATIVE)\n R, _ = cv2.Rodrigues(rvec)\n pnp_errors, projpts, avg_err, perc_inliers = test_reproj_pnp_points(list_pts3d_for_pnp, list_pts2d_for_pnp, R, tvec, K, rep_thresh=reprojThresh)\n if highest_inliers < perc_inliers:\n highest_inliers = perc_inliers\n best_R = R\n best_tvec = tvec\n R = best_R\n tvec = best_tvec\n print('rvec:', rvec,'\\n\\ntvec:', tvec)\n\n return R, tvec",
"def pss(self):\n return (self.table[0, 0] * self.table[1, 1] - self.table[0, 1] * self.table[1, 0]) / \\\n ((self.table[0, 0] + self.table[1, 0]) * (self.table[0, 1] + self.table[1, 1]))",
"def getPSF(self, z_value, shape = None, normalize = False):\n assert False",
"def infer_pvalues(self, p=0.05, mcp='maxstat'):\n # ---------------------------------------------------------------------\n # check that pac and surrogates has already been computed\n assert hasattr(self, 'pac'), (\"You should compute PAC first. Use the \"\n \"`fit` method\")\n assert hasattr(self, 'surrogates'), \"No surrogates computed\"\n\n # mean pac and surrogates across trials\n m_pac, m_surro = self.pac.mean(2), self.surrogates.mean(3)\n self._pvalues = self._infer_pvalues(m_pac, m_surro, p=p, mcp=mcp)\n\n return self._pvalues",
"def compute_fpa_welfare(bidders, anonymous_reserve, points=1000):\r\n wel = 0.0\r\n for i in range(len(bidders[0].strategy.F_jump_points) - 1):\r\n bid_range_start = bidders[0].strategy.F_jump_points[i][0]\r\n bid_range_end = bidders[0].strategy.F_jump_points[i + 1][0]\r\n if bid_range_start < bid_range_end:\r\n num_points = max(100, int(points / (\r\n bidders[0].strategy.F_jump_points[-1][0] - bidders[0].strategy.F_jump_points[0][0]) *\r\n (bidders[0].strategy.F_jump_points[i + 1][0] - bidders[0].strategy.F_jump_points[i][0])))\r\n bid_range = np.linspace(bidders[0].strategy.F_jump_points[i][0],\r\n bidders[0].strategy.F_jump_points[i + 1][0],\r\n num=num_points)\r\n bid_range = bid_range[1:]\r\n\r\n wel_b = [0.0] * len(bid_range)\r\n for j in range(len(bid_range)):\r\n cdf, _, active_values = prob_dist(bidders, bid_range[j])\r\n if len(active_values) <= 1:\r\n wel_b[j] = 0.0\r\n else:\r\n wel_b[j] = np.prod(cdf) * (sum(active_values) / (len(active_values) - 1.0) *\r\n sum(1.0 / (np.array(active_values) - bid_range[j])) -\r\n sum(np.array(active_values) / (np.array(active_values) - bid_range[j])))\r\n wel += integrate.simps(wel_b, bid_range)\r\n\r\n # point mass at min winning bid\r\n min_winning_bid = max(compute_min_winning_bid(bidders), anonymous_reserve)\r\n values = [[-1.0] for _ in range(len(bidders))]\r\n probs = [[0.0] for _ in range(len(bidders))]\r\n for i in range(len(bidders)):\r\n bidder = bidders[i]\r\n F_min_winning_bid = bidder.strategy.F_jump_points[0][1]\r\n for j in range(len(bidder.prob)):\r\n if bidder.values[j] < min_winning_bid:\r\n probs[i][0] += bidder.prob[j]\r\n elif sum(bidder.prob[:j]) < F_min_winning_bid:\r\n values[i].append(bidder.values[j])\r\n probs[i].append(min(bidder.prob[j], F_min_winning_bid - sum(bidder.prob[:j])))\r\n else:\r\n break\r\n\r\n for i in range(len(bidders)):\r\n for i_value_idx in range(1, len(values[i])):\r\n i_value = values[i][i_value_idx]\r\n if i_value == 0.0:\r\n continue\r\n winning_prob = probs[i][i_value_idx]\r\n for j in range(len(bidders)):\r\n if j == i:\r\n continue\r\n j_losing_prob = 0.0\r\n for j_value_idx in range(len(values[j])):\r\n j_value = values[j][j_value_idx]\r\n if j_value < i_value or (j_value == i_value and j < i):\r\n j_losing_prob += probs[j][j_value_idx]\r\n winning_prob *= j_losing_prob\r\n wel += winning_prob * i_value\r\n return wel",
"def calP(self):\n N = len(self.listOfParticles)\n m = self.listOfParticles[0].m\n vsum = 0\n for particle in self.listOfParticles:\n vsum += particle.V.len()\n A = np.pi*self.R**2\n F = 0.5 * A * (2*self.R) * m * N * vsum**2\n return F",
"def do_instance_pruning(self):\n\n # retrieve the probability of predicting fraud for each model (K models)\n # size: K x ChunkSize x 2 (2 for binary labels)\n predict_proba_fraud = [-1] * self.K\n\n # for each instance in the data chunk\n for i, instance in enumerate(self.y_chunk):\n sum_weight = 0\n current_F = 0\n F_vect = np.zeros(self.K) # Fk at each stage\n\n # compute F_k(y) for k = 1...K - the classifiers are sorted in DESCENDING order of weights\n k = -1\n for model in self.models.islice(start=0, stop=self.K, reverse=True):\n k += 1\n clf = model.clf\n sum_weight += model.weight\n\n # compute the current probability\n # if the probability is not initialized we call the `predict_proba` method\n if (type(predict_proba_fraud[k]) is int and predict_proba_fraud[k] == -1) \\\n or (predict_proba_fraud[k].shape[0] != self.S):\n predict_proba_fraud[k] = clf.predict_proba(self.X_chunk)\n\n # check if we have the probabilities of 2 labels (because we're working with BINARY classification)\n # if we don't have the probability of predicting fraud it will be 0 so we don't do anything\n if len(predict_proba_fraud[k][i]) == 2:\n current_F += model.weight * predict_proba_fraud[k][i][1]\n\n # (2) compute the Fk for each example seen at each stage\n F_k = current_F / sum_weight\n F_vect[k] = F_k\n\n # (3) compute the error\n err_x = F_vect - F_vect[-1]\n\n # (4) update the mean and the variance of the error of these training examples for each bin (i,k)\n # we look at the error at each step for the given example\n for k, err in enumerate(err_x):\n # 1 --> we assign Fk to the corresponding bin (i,k) or (j,k)here because we used i index before\n eps = len(self.bins)\n\n for j in range(0, eps):\n if (j / eps) <= F_vect[k] < ((j + 1) / eps):\n self.bins[j][k]['num'] += 1\n\n # 2--> we compute the mean error in this bin\n self.bins[j][k]['mean'] += err\n\n # 2--> we compute the variance of the error in this bin\n # (basically we will just compute the squared error and do the division later)\n self.bins[j][k]['var'] += err ** 2\n\n # if we've assigned it to a bin, break and go to the next stage\n break\n\n # after computing everything we do the division by the total number assigned to a bin\n for i in range(0, len(self.bins)):\n # a bit tricky because sometimes we have bins that don't have any input example --> remains at 0\n for k in range(self.K):\n if self.bins[i][k]['num'] > 0:\n # divide the sum of error by the number of examples in the bin\n self.bins[i][k]['mean'] = self.bins[i][k]['mean'] / self.bins[i][k]['num']\n\n # compute the variance\n self.bins[i][k]['var'] = (self.bins[i][k]['var'] / self.bins[i][k]['num']) - \\\n (self.bins[i][k]['mean']) ** 2",
"def aksprob(alam):\r\n if type(alam) == N.ndarray:\r\n frozen = -1 *N.ones(alam.shape,N.float64)\r\n alam = alam.astype(N.float64)\r\n arrayflag = 1\r\n else:\r\n frozen = N.array(-1.)\r\n alam = N.array(alam,N.float64)\r\n arrayflag = 1\r\n mask = N.zeros(alam.shape)\r\n fac = 2.0 *N.ones(alam.shape,N.float_)\r\n sum = N.zeros(alam.shape,N.float_)\r\n termbf = N.zeros(alam.shape,N.float_)\r\n a2 = N.array(-2.0*alam*alam,N.float64)\r\n totalelements = N.multiply.reduce(N.array(mask.shape))\r\n for j in range(1,201):\r\n if asum(mask) == totalelements:\r\n break\r\n exponents = (a2*j*j)\r\n overflowmask = N.less(exponents,-746)\r\n frozen = N.where(overflowmask,0,frozen)\r\n mask = mask+overflowmask\r\n term = fac*N.exp(exponents)\r\n sum = sum + term\r\n newmask = N.where(N.less_equal(abs(term),(0.001*termbf)) +\r\n N.less(abs(term),1.0e-8*sum), 1, 0)\r\n frozen = N.where(newmask*N.equal(mask,0), sum, frozen)\r\n mask = N.clip(mask+newmask,0,1)\r\n fac = -fac\r\n termbf = abs(term)\r\n if arrayflag:\r\n return N.where(N.equal(frozen,-1), 1.0, frozen) # 1.0 if doesn't converge\r\n else:\r\n return N.where(N.equal(frozen,-1), 1.0, frozen)[0] # 1.0 if doesn't converge\r",
"def calPFP(n, m, k):\n return pow(1-math.exp(-k*(n+0.5)/(m-1)), k)",
"def computeProportionFuselage(self):\n\n # reads settings file for distribution\n if self.nFuselage > 0:\n self.fs_m_pointsA = []\n self.fs_m_pointsIy = []\n self.fs_m_pointsIz = []\n self.fs_m_pointsJ = []\n inputMaxA = self.settings[\"fuselage\"][\"mechanicalProperties\"][\"A\"]\n inputMaxIy = self.settings[\"fuselage\"][\"mechanicalProperties\"][\"Iy\"]\n inputMaxIz = self.settings[\"fuselage\"][\"mechanicalProperties\"][\"Iz\"]\n inputMaxJ = self.settings[\"fuselage\"][\"mechanicalProperties\"][\"J\"]\n inputInterpolationType = self.settings[\"fuselage\"][\"FEM\"][\"mechanicalInterpolationType\"]\n logger.debug(self.fs_m_pointsInitArea[0])\n index = np.argmax(self.fs_m_pointsInitArea[0])\n logger.debug(index)\n fuselageMaxArea = self.fs_m_pointsInitArea[0][index]\n\n if inputInterpolationType == \"constant\":\n exposant = 0\n elif inputInterpolationType == \"linear\":\n exposant = 1\n elif inputInterpolationType == \"quadratic\":\n exposant = 2\n else:\n logger.error(\"Fuselage mechanical properties distribution is\")\n logger.error(\"wrong. Accepted values are \\\"constant\\\",\")\n logger.error(\"\\\"linear\\\", \\\"quadratic\\\" \")\n sys.exit()\n\n coef = np.empty(self.userAskedNNodesFuselage)\n self.f_m_pointsA = np.empty(self.userAskedNNodesFuselage)\n self.f_m_pointsIy = np.empty(self.userAskedNNodesFuselage)\n self.f_m_pointsIz = np.empty(self.userAskedNNodesFuselage)\n self.f_m_pointsJ = np.empty(self.userAskedNNodesFuselage)\n for i in range(self.userAskedNNodesFuselage):\n coef[i] = self.fs_m_pointsInitArea[0][i]/fuselageMaxArea\n self.f_m_pointsA[i] = inputMaxA * coef[i]\n self.f_m_pointsIy[i] = inputMaxIy * coef[i]**exposant\n self.f_m_pointsIz[i] = inputMaxIz * coef[i]**exposant\n self.f_m_pointsJ[i] = inputMaxJ * coef[i]**exposant\n self.fs_m_pointsA.append(self.f_m_pointsA)\n self.fs_m_pointsIy.append(self.f_m_pointsIy)\n self.fs_m_pointsIz.append(self.f_m_pointsIz)\n self.fs_m_pointsJ.append(self.f_m_pointsJ)\n else:\n logger.warning(\"No fuselage found\")",
"def get_p_inf(clusters, shape):\n\n if len(clusters) == 0:\n return 0\n\n else:\n return max(clusters) / (shape[0] * shape[1])",
"def ppf(self,x):\n ppfValue = self._distribution.inverseCdf(x,random())\n return ppfValue",
"def ppf(self,x):\n ppfValue = self._distribution.inverseCdf(x,random())\n return ppfValue",
"def onestep_reachability(p_center, ssm, k_ff, l_mu, l_sigma, q_shape=None, k_fb=None,\n c_safety=1., verbose=1, a=None, b=None):\n n_s = np.shape(p_center)[0]\n n_u = np.shape(k_ff)[0]\n\n if a is None:\n a = np.eye(n_s)\n b = np.zeros((n_s, n_u))\n\n if q_shape is None: # the state is a point\n u_p = k_ff\n\n if verbose > 0:\n print(\"\\nApplying action:\")\n print(u_p)\n\n z_bar = np.vstack((p_center, u_p))\n\n mu_0, sigm_0, _ = ssm(p_center.T, u_p.T)\n mu_0 = np.array(mu_0)\n sigm_0 = np.array(sigm_0)\n\n rkhs_bounds = c_safety * np.sqrt(sigm_0.T).reshape((n_s,))\n\n q_1 = ellipsoid_from_rectangle(rkhs_bounds)\n\n p_lin = np.dot(a, p_center) + np.dot(b, u_p)\n p_1 = p_lin + mu_0\n\n if verbose > 0:\n print_ellipsoid(p_1, q_1, text=\"uncertainty first state\")\n\n return p_1, q_1\n else: # the state is a (ellipsoid) set\n if verbose > 0:\n print_ellipsoid(p_center, q_shape, text=\"initial uncertainty ellipsoid\")\n # compute the linearization centers\n x_bar = p_center # center of the state ellipsoid\n u_bar = k_ff # u_bar = K*(u_bar-u_bar) + k = k\n z_bar = np.vstack((x_bar, u_bar))\n\n if verbose > 0:\n print(\"\\nApplying action:\")\n print(u_bar)\n # compute the zero and first order matrices\n mu_0, sigm_0, jac_mu = ssm(x_bar.T, u_bar.T)\n mu_0 = np.array(mu_0)\n sigm_0 = np.array(sigm_0)\n jac_mu = np.array(jac_mu)\n\n if verbose > 0:\n print_ellipsoid(mu_0, diag(sigm_0.squeeze()),\n text=\"predictive distribution\")\n\n a_mu = jac_mu[:, :n_s]\n b_mu = jac_mu[:, n_s:]\n\n # reach set of the affine terms\n H = a + a_mu + np.dot(b_mu + b, k_fb)\n p_0 = mu_0 + np.dot(a, x_bar) + np.dot(b, u_bar)\n\n Q_0 = np.dot(H, np.dot(q_shape, H.T))\n\n if verbose > 0:\n print_ellipsoid(p_0, Q_0, text=\"linear transformation uncertainty\")\n # computing the box approximate to the lagrange remainder\n\n # lb_mean,ub_mean = compute_bounding_box_lagrangian(q_shape,L_mu,K,k,order = 2,verbose = verbose)\n # lb_sigm,ub_sigm = compute_bounding_box_lagrangian(q_shape,L_sigm,K,k,order = 1,verbose = verbose)\n ub_mean, ub_sigma = compute_remainder_overapproximations(q_shape, k_fb, l_mu,\n l_sigma)\n b_sigma_eps = c_safety * (np.sqrt(sigm_0.T) + ub_sigma)\n\n Q_lagrange_sigm = ellipsoid_from_rectangle(b_sigma_eps.squeeze())\n p_lagrange_sigm = zeros((n_s, 1))\n\n if verbose > 0:\n print_ellipsoid(p_lagrange_sigm, Q_lagrange_sigm,\n text=\"overapproximation lagrangian sigma\")\n\n Q_lagrange_mu = ellipsoid_from_rectangle(ub_mean)\n p_lagrange_mu = zeros((n_s, 1))\n\n if verbose > 0:\n print_ellipsoid(p_lagrange_mu, Q_lagrange_mu,\n text=\"overapproximation lagrangian mu\")\n\n p_sum_lagrange, Q_sum_lagrange = sum_two_ellipsoids(p_lagrange_sigm,\n Q_lagrange_sigm,\n p_lagrange_mu,\n Q_lagrange_mu)\n\n p_1, q_1 = sum_two_ellipsoids(p_sum_lagrange, Q_sum_lagrange, p_0, Q_0)\n\n if verbose > 0:\n print_ellipsoid(p_1, q_1, text=\"accumulated uncertainty current step\")\n\n print(\"volume of ellipsoid summed individually\")\n print((np.linalg.det(np.linalg.cholesky(q_1))))\n\n return p_1, q_1",
"def RUN_PULSAR(numTrials, rateMap, numPhotons=48,numPulsars = 6, angularSize=10.0, outputSize=100, mcList='MCOut.pickle',flatLevel = 0.0,HESS=False, Sig = -1,numProcs = 10):\r\n import FermiPSF, ParseFermi\r\n \r\n print 'Beginning MC Series\\nProgress'\r\n\r\n mcOut = []\r\n map = pickle.load(open(rateMap, \"r\" )) # load rate-map\r\n PSFTableFront = FermiPSF.PSF_130(convType='front') # load PSF front converting\r\n PSFTableBack = FermiPSF.PSF_130(convType='back') # load PSF back converting\r\n start = time.time();\r\n \r\n ppa = outputSize/angularSize # pixel per degree\r\n\r\n # Import background template\r\n bgmap = 'BGRateMap.pickle'\r\n if (HESS == True):\r\n bgmap = 'BGRateMap_HESS_2_deg.pickle'\r\n \r\n bgTemplate = pickle.load(open(bgmap , \"r\" ))\r\n \r\n mcOut = np.zeros(numTrials)\r\n p = pool.Pool(numProcs)\r\n partial_MC_PULSAR_THREAD = partial( MC_PULSAR_THREAD, map = map,bgTemplate=bgTemplate,PSFTableFront=PSFTableFront, PSFTableBack=PSFTableBack, HESS=HESS, angularSize=angularSize, numPhotons=numPhotons, outputSize=outputSize, numPulsars = numPulsars,Sig=Sig)\r\n mcOut = p.map(partial_MC_PULSAR_THREAD, mcOut)\r\n \r\n# for i in range(numTrials):\r\n# np.random.seed()\r\n# # Compute number of background photons\r\n# numSignal = np.random.poisson(lam = .25*numPhotons)\r\n# if (HESS == True):\r\n# numSignal = np.random.poisson(lam = .05*numPhotons)\r\n# if Sig >= 0:\r\n# numSignal = np.random.poisson(lam = Sig*numPhotons)\r\n# \r\n# bg = numPhotons-numSignal # number of BG photons\r\n# \r\n# # Build the background \r\n## background = Build_Background_Sideband(bgMean, lowSideband, highSideband, PSFTable)\r\n# background = Build_Background_Template(bg, bgTemplate, PSFTableFront, PSFTableBack ,HESS=HESS, angularSize = angularSize )\r\n# \r\n# \r\n# # Run MC for source photons \r\n# data = MC_PULSAR(map,numSignal, numPulsars,angularSize,outputSize,PSFTableFront, PSFTableBack, HESS = HESS)\r\n# # Concatenate and append this run to the simulation output\r\n# mcOut.append((data[0]+background[0], data[1]+background[1]))\r\n# \r\n# # Compute Speed Statistics\r\n# sys.stdout.write('\\r' + str(i+1)+'/'+str(numTrials)) \r\n# sys.stdout.flush()\r\n elapsed = time.time()-start;\r\n if (elapsed != 0.0):\r\n print '\\nSimulations Completed in', elapsed, 's', '(',numTrials/elapsed, ' sims per second)'\r\n \r\n outFile = open(mcList, \"wb\" )\r\n pickle.dump(mcOut, outFile)\r\n print 'Results saved to ', mcList\r\n return mcOut",
"def setup_fpa():\n # it is a silicon detector. Based on the graph, the quantum efficiency\n # at 1.06 um is ~50%.\n fpa = {}\n fpa[\"quantum_efficiency\"] = 0.5\n return fpa",
"def percentage_point(p):\n SPLIT1 = 0.425E0\n SPLIT2 = 5.0E0\n CONST1 = 0.180625E0\n CONST2 = 1.6E0\n\n A = partial(_polynomial,\n 3.3871328727963666080E0,\n 1.3314166789178437745E2,\n 1.9715909503065514427E3,\n 1.3731693765509461125E4,\n 4.5921953931549871457E4,\n 6.7265770927008700853E4,\n 3.3430575583588128105E4,\n 2.5090809287301226727E3\n )\n\n B = partial(_polynomial,\n 1,\n 4.2313330701600911252E1,\n 6.8718700749205790830E2,\n 5.3941960214247511077E3,\n 2.1213794301586595867E4,\n 3.9307895800092710610E4,\n 2.8729085735721942674E4,\n 5.2264952788528545610E3\n )\n\n C = partial(_polynomial,\n 1.42343711074968357734E0,\n 4.63033784615654529590E0,\n 5.76949722146069140550E0,\n 3.64784832476320460504E0,\n 1.27045825245236838258E0,\n 2.41780725177450611770E-1,\n 2.27238449892691845833E-2,\n 7.74545014278341407640E-4,\n )\n\n D = partial(_polynomial,\n 1,\n 2.05319162663775882187E0,\n 1.67638483018380384940E0,\n 6.89767334985100004550E-1,\n 1.48103976427480074590E-1,\n 1.51986665636164571966E-2,\n 5.47593808499534494600E-4,\n 1.05075007164441684324E-9\n )\n\n E = partial(_polynomial,\n 6.65790464350110377720E0,\n 5.46378491116411436990E0,\n 1.78482653991729133580E0,\n 2.96560571828504891230E-1,\n 2.65321895265761230930E-2,\n 1.24266094738807843860E-3,\n 2.71155556874348757815E-5,\n 2.01033439929228813265E-7\n )\n\n F = partial(_polynomial,\n 1,\n 5.99832206555887937690E-1,\n 1.36929880922735805310E-1,\n 1.48753612908506148525E-2,\n 7.86869131145613259100E-4,\n 1.84631831751005468180E-5,\n 1.42151175831644588870E-7,\n 2.04426310338993978564E-15\n )\n\n q = p - 0.5\n if abs(q) < SPLIT1:\n r = CONST1 - q ** 2\n ppnd16 = q * A(r) / B(r)\n else:\n r = p if q < 0 else 1 - p\n if r < 0:\n raise ValueError()\n r = math.sqrt(-math.log(r))\n if r < SPLIT2:\n r = r - CONST2\n ppnd16 = C(r) / D(r)\n else:\n r = r - SPLIT2\n ppnd16 = E(r) / F(r)\n if q < 0:\n ppnd16 = -ppnd16\n return ppnd16",
"def _residual_edp(self, params):\n data = self.F**2\n model = np.absolute(self._model())**2\n sigma = self.sigma\n return (data[self.mask]-model[self.mask]) / sigma[self.mask] \n \n # The following three lines do not reproduce Sun's results, which proves\n # that the fits were done through intensity, not form factor.\n #data = self.F\n #model = np.absolute(self._model())\n #return (data - model) ",
"def getHFprofile(self, PFC):\n psi = PFC.psimin\n R_omp = self.map_R_psi(psi,PFC)\n Z_omp = np.zeros(R_omp.shape)\n # Evaluate B at midplane\n Bp_omp = PFC.ep.BpFunc.ev(R_omp,Z_omp)\n Bt_omp = PFC.ep.BtFunc.ev(R_omp,Z_omp)\n B_omp = np.sqrt(Bp_omp**2 + Bt_omp**2)\n xyz = PFC.centers\n R_div,Z_div,phi_div = tools.xyz2cyl(xyz[:,0],xyz[:,1],xyz[:,2])\n print('phi_divMin = {:f}'.format(phi_div.min()))\n print('phi_divMax = {:f}'.format(phi_div.max()))\n # Evaluate B at Target Plate neglecting shadowed points\n Bp_div = PFC.ep.BpFunc.ev(R_div,Z_div)\n Bt_div = PFC.ep.BtFunc.ev(R_div,Z_div)\n B_div = np.sqrt(Bp_div**2 + Bt_div**2)\n #Calculate psi using gfile for scaling coefficient\n psi_EQ = PFC.ep.psiFunc.ev(R_div,Z_div)\n #Calculate poloidal flux expansion\n #fx = R_div*Bp_div / (R_omp*Bp_omp)\n q = np.zeros(PFC.centers[:,0].shape)\n use = np.where(PFC.shadowed_mask == 0)[0]\n\n #handle various heat flux regressions if user selected that in GUI\n if self.lqCNmode == 'eich':\n self.getEichFromEQ(PFC.ep)\n self.lqCN = self.lqEich\n\n if self.SMode == 'makowski':\n self.getMakowskiFromEQ(PFC.ep, self.fG)\n\n if self.lqCFmode == 'horacek':\n self.getHoracekFromEQ(PFC.ep)\n\n\n print(\"Heat flux profile type: \"+self.hfMode)\n #Multiple exponential profile (Brunner Profile)\n if self.hfMode=='multiExp' or self.hfMode=='limiter':\n q[use] = self.multiExp_profile_fluxspace(PFC, R_omp, Bp_omp, psi, self.hfMode)\n if self.hfMode =='multiExp':\n print(\"lqCN: {}\".format(self.lqCN))\n print(\"lqCF: {}\".format(self.lqCF))\n print(\"lqPN: {}\".format(self.lqPN))\n print(\"lqPF: {}\".format(self.lqPF))\n elif self.hfMode == 'limiter':\n print(\"lqCN: {}\".format(self.lqCN))\n print(\"lqCF: {}\".format(self.lqCF))\n\n #Eich Profile\n else:\n q0 = self.scaleHF_fluxspace(PFC,self.lqCN,self.S,self.Psol)\n q[use] = self.eich_profile_fluxspace(PFC, self.lqCN, self.S, R_omp, Bp_omp, psi)\n q *= q0\n q += self.qBG\n print(\"lqCN: {} [mm]\".format(self.lqCN))\n print(\"S: {} [mm]\".format(self.S))\n print(\"q0 {} [MW/m^2]\".format(q0))\n\n #Scale by fraction of power going to this PFC's divertor\n PFC.powerFrac = self.getDivertorPowerFraction(PFC.DivCode)\n q *= PFC.powerFrac\n print(\"PFC \"+PFC.name+\" has {:.2f}% of the total power\".format(PFC.powerFrac*100.0))\n log.info(\"PFC \"+PFC.name+\" has {:.2f}% of the total power\".format(PFC.powerFrac*100.0))\n\n return q",
"def ppf(self, q):\n self.value = self.rv.ppf(\n q, *self._pymc_dists_to_value(self.args), **self.kwds\n )\n return self.value",
"def calc_std_nDCG_AP_corpus_smoothing(p):\n \n# nDCG_MAP_res = base_path +\"\\\\nDCG_MAP_res\\\\\"\n measures_res = linux_base_path+ \"/measures_res\"+setup+\"/\"\n k_val = 50\n NDCG_AP_all_claims_all_param_values = read_pickle(measures_res+\"NDCG_AP_prec_at_k_all_claims_all_param_values_top_k_docs_\"+str(k_val)+\"_at_\"+str(p)) #key:clm,alpha_f,beta_f,k_val,lambda_f val nDCG_score,AP_score\n each_params_AVGnDCG_MAP_dict = read_pickle(measures_res+\"each_params_AVGnDCG_MAP_prec_at_k_dict_top_k_docs_\"+str(k_val)+\"_at_\"+str(p)) #key:alpha_f,beta_f,k_val,lambda_f\n nDCG_MAP_std = {} #key is a configuration quadruplet, value is the std of the measures\n \n \n \n# for k_val in top_k_docs_values:\n for alpha in range(0,11,1): #change just for test!\n for beta in range(0,10,1):\n for lambda_int in range(0,11,1):\n lambda_f = turn_to_float([lambda_int])\n (alpha_f,beta_f) = turn_to_float([alpha,beta])\n curr_AP_var = 0\n curr_nDCG_var = 0\n curr_prec_at_5_var = 0\n curr_prec_at_10_var = 0\n for clm in claim_list:\n curr_nDCG_var += (NDCG_AP_all_claims_all_param_values[str(clm),alpha_f,beta_f,k_val,lambda_f][0] - each_params_AVGnDCG_MAP_dict[alpha_f,beta_f,k_val,lambda_f][0])**2\n curr_AP_var += (NDCG_AP_all_claims_all_param_values[str(clm),alpha_f,beta_f,k_val,lambda_f][1] - each_params_AVGnDCG_MAP_dict[alpha_f,beta_f,k_val,lambda_f][1])**2\n curr_prec_at_5_var += (NDCG_AP_all_claims_all_param_values[str(clm),alpha_f,beta_f,k_val,lambda_f][2] - each_params_AVGnDCG_MAP_dict[alpha_f,beta_f,k_val,lambda_f][2])**2\n curr_prec_at_10_var +=(NDCG_AP_all_claims_all_param_values[str(clm),alpha_f,beta_f,k_val,lambda_f][3] - each_params_AVGnDCG_MAP_dict[alpha_f,beta_f,k_val,lambda_f][3])**2\n curr_nDCG_std = float(float(math.sqrt(curr_nDCG_var))/float(len(claim_list)))\n curr_AP_std = float(float(math.sqrt(curr_AP_var))/float(len(claim_list)))\n curr_prec_at_5_std = float(float(math.sqrt(curr_prec_at_5_var))/float(len(claim_list)))\n curr_prec_at_10_std =float(float(math.sqrt(curr_prec_at_10_var))/float(len(claim_list)))\n nDCG_MAP_std[alpha_f,beta_f,k_val,lambda_f] = (curr_nDCG_std,curr_AP_std,curr_prec_at_5_std,curr_prec_at_10_std)\n save_pickle(measures_res+\"nDCG_MAP_prec_at_k_std_for_each_configuration_k_top_docs_\"+str(k_val)+\"_at_\"+str(p), nDCG_MAP_std)",
"def test_special_PSX(self, angexp):\n a, b, c = angexp[0]\n tgt = U3Gate(a, b, c).to_matrix()\n exp = {(\"p\", \"sx\")[g]: angexp[1][g] for g in (0, 1) if angexp[1][g]}\n self.check_oneq_special_cases(tgt, \"PSX\", exp)",
"def validate(self):\n vnames = []\n vnames.append(self.pltw.vectInfolst[self.blkno][0].name)\n vnames.append(self.pltw.vectInfolst[self.blkno][1].name)\n vnames.append(\"pksum\")\n\n txt = \"Number of peaks = {0}\\n\".format(self.npeaks)\n txt += \"Fitting results:\\n\"\n txt += \"pktyp\\t xm\\t amp\\t width\\t area\\n\"\n # Remove the vector difference at the end\n newset = self.data[:-1]\n for i in range(0, len(self.parmVal), self.maxparm):\n pkno = int(i/self.maxparm)\n ptyp = self.peakTyp[pkno]\n xm = self.parmVal[i]\n amp = self.parmVal[i+1]\n w = self.parmVal[i+2]\n a = self.parmVal[i+3]\n m = self.parmVal[i+4]\n if ptyp == 'G':\n S = self.gauss(self.data[0], xm, amp, w)\n elif ptyp == 'L':\n S = self.lorentz(self.data[0], xm, amp, w)\n elif ptyp == 'P':\n S = self.psVoigt(self.data[0], xm, amp, w, m)\n elif ptyp == 'AG':\n S = self.agauss(self.data[0], xm, amp, w, a)\n elif ptyp == 'AL':\n S = self.alorentz(self.data[0], xm, amp, w, a)\n elif ptyp == 'AP':\n S = self.aPsVoigt(self.data[0], xm, amp, w, a, m)\n newset = np.vstack((newset, S))\n area = calcArea(self.data[0], S, True)\n area = round_to_n(area, 4)\n xm = round_to_n(xm, 4)\n amp = round_to_n(amp, 4)\n w = round_to_n(w, 4)\n # a = round_to_n(a, 4)\n # m = round_to_n(m, 4)\n vnames.append(\"pk{0}\".format(pkno+1))\n txt += \"{0}\\t {1}\\t {2} \\t {3}\\t {4}\\n\".format(ptyp, xm, amp, w, area)\n\n # Save data and peaks in a text file\n stnam = \"\\n{0}\".format(\"\\t\".join(vnames))\n txt += stnam\n savename = os.path.join(self.parent.progpath, \"peakfit.txt\")\n np.savetxt(savename, np.transpose(newset), fmt='%+1.4E', delimiter='\\t', header=txt)\n # load the converted file\n self.parent.loadFile(savename)\n self.hide()",
"def get_Pn(f, L, S_lp, S_ac): \r\n # single-link optical metrology noise (Hz^{-1}), Equation (10)\r\n P_oms = S_lp**2 \r\n # single test mass acceleration noise, Equation (11)\r\n P_acc = S_ac**2*(1. + 0.1e-3/f) \r\n # total noise in Michelson-style LISA data channel, Equation (12)\r\n Pn = (P_oms + 4.*P_acc/(2.*pi*f)**4.)/L**2. \r\n return Pn",
"def prf(stats):\n if stats['pred'] == 0:\n return 0, 0, 0\n p = stats['corr']/stats['pred']\n r = stats['corr']/stats['gold']\n if p > 0 and r > 0:\n f = 2*p*r/(p+r)\n else:\n f = 0\n return p, r, f",
"def PF(x_input, y_input, ppnm, Thr = 5):\n\n #Make independent copies of input\n x_vals = copy.deepcopy(x_input)\n y_vals = copy.deepcopy(y_input)\n \n Threshold = Thr * ppnm\n \n ### Sets all positive values to zero.\n for index, value in enumerate(y_vals):\n if value > 0:\n y_vals[index] = 0\n \n ### Variables used to filter peaks based on their width.\n InPeak = False # True if last value was part of a peak. Else it is False.\n count = 0 # Counts peaks of each rectraction curve.\n peaks = {} # Dictionary that contains All peaks (Unfiltered).\n PeakList = [] # Pooled indeces of broad peaks.\n \n ### Goes through every value in y_vals and detects if it is at the start, end or within a peak.\n ### Saves the corresponding indeces in lists (peakX).\n for index, value in enumerate(y_vals):\n \n ###Start of a Peak\n if value < 0 and InPeak == False:\n InPeak = True\n peaks['peak' + str(count)] = []\n peaks['peak' + str(count)].append(index)\n \n ###During Peak\n elif value < 0 and InPeak == True:\n peaks['peak' + str(count)].append(index)\n \n ###End of Peak\n elif value == 0 and InPeak == True:\n InPeak = False\n count += 1\n \n ### If a peak is broader than the Threshold it is added to PeakList\n for number in peaks:\n if len(peaks[number]) > Threshold:\n PeakList += peaks[number]\n\n ### Sets all values at indeces not contained in PeakList to zero. Only broad peaks stay.\n for index, value in enumerate(y_vals):\n if index not in PeakList:\n y_vals[index] = 0\n\n return x_vals, y_vals",
"def solar_ppa():\n per_kwh = 0.196 # [$/kWh]\n\n return per_kwh",
"def evolveSFParams(self, p, Q, Pe, evol='z', z0=0.0):\n\n zmeans = ( self.zbins[1:] + self.zbins[:-1] ) / 2\n par = np.zeros((len(zmeans), 3))\n\n for i, z in enumerate(zmeans):\n par[i,:] = copy(p)\n par[i,0] *= 10 ** (0.4 * Pe * (z - z0))\n par[i,1] -= Q * (z - z0)\n\n return par",
"def compute_ps_mass(ps):\n\treturn sum(AA_mass_table[it] for it in ps)",
"def BSGS(self, P):\n if P == self.infpoint:\n return 1\n\n bfsize = card(self.basefield)\n\n Q = self.mul(bfsize + 1, P)\n m = arith1.floorpowerroot(bfsize, 4) + 1\n Plist = [self.infpoint]\n R = P\n j = 1\n while j <= m:\n Plist.append(R)\n R = self.add(R, P)\n j = j+1\n R = self.mul(2*m, P)\n k = -m\n Plist_rev = list(map(self.mul, [-1]*(m+1), Plist)) # make reverse point mapping\n j = 0\n while k <= m:\n S = self.add(Q, self.mul(k, R))\n if S in Plist:\n j = Plist.index(S)\n break\n elif S in Plist_rev:\n j = -Plist_rev.index(S)\n break\n k = k+1\n M = self.ch+1+2*m*k-j\n Flist = factor_methods.factor(M)\n for p, e in Flist:\n for i in range(e):\n if self.mul(M//p, P) == self.infpoint:\n M = M//p\n return M",
"def MC_PULSAR(rateMap,numPhotons,numPulsars,angularSize,outputSize,PSFTableFront, PSFTableBack, HESS = False):\r\n \r\n x_dimen = np.shape(rateMap)[0]\r\n y_dimen = np.shape(rateMap)[1]\r\n \r\n outputScaleFactor = float(outputSize)/float(x_dimen)\r\n \r\n APP = float(angularSize)/float(outputSize)\r\n \r\n photonListX = []\r\n photonListY = []\r\n photonCount = 0\r\n \r\n pulsarListX = []\r\n pulsarListY = []\r\n pulsarCount = 0\r\n \r\n # First pick out the projected position of the pulsars\r\n while (pulsarCount < numPulsars):\r\n # Choose a random coordinate in the rate map\r\n x = np.random.randint(0,high=x_dimen)\r\n y = np.random.randint(0,high=y_dimen)\r\n # Look up value of annihilation rate\r\n rate = rateMap[x,y]\r\n # Select random number between 0 and 1. If rate is greater, then we accept a pulsar here\r\n if (np.random.ranf() < rate):\r\n # Shift and scale coordinates to output map\r\n x = int(round((x)*outputScaleFactor)) \r\n y = int(round((y)*outputScaleFactor))\r\n # Ensure that we are still in the region of interest after PSF modification\r\n if (abs(x) <= outputSize and abs(y) <= outputSize):\r\n pulsarListX.append(x)\r\n pulsarListY.append(y)\r\n pulsarCount+=1\r\n \r\n # Now for each photon we must choose a progenitor pulsar and modify the photon position by the Fermi PSF\r\n while (photonCount <numPhotons):\r\n # Choose a random pulsar (need to weight by distance?)\r\n idx = np.random.randint(0,high=numPulsars)\r\n \r\n x = pulsarListX[idx]\r\n y = pulsarListY[idx]\r\n \r\n # Currently equal weight given to each pulsar so no need for anything but PSF modification.\r\n \r\n # Shift and scale coordinates to output map and then compute PSF modification to the position.\r\n psfMod = PSF_Spread(PSFTableFront, PSFTableBack, HESS = HESS)\r\n dx = psfMod[0]*math.cos(psfMod[1])/APP # PSF shift in output pixels\r\n dy = psfMod[0]*math.sin(psfMod[1])/APP # \r\n x = x*outputScaleFactor + dx \r\n y = y*outputScaleFactor + dy\r\n \r\n # Ensure that we are still in the region of interest after PSF modification\r\n if (abs(x) <= outputSize and abs(y) <= outputSize):\r\n photonListX.append((x-float(outputSize)/2.0)*APP)\r\n photonListY.append((y-float(outputSize)/2.0)*APP)\r\n photonCount+=1 \r\n return (photonListX,photonListY)",
"def calcSFP(gamma_tilt_deg, created_pal, SFP_config, Tech_res):\n # general\n gamma_n = gamma_tilt_deg / 180 * np.pi\n N = SFP_config.N\n \n ########################## SIMULATION SETUP ###############################\n # reference pressure\n p0 = 2 * 10**(-5)\n # frequencies\n f = get_freq_vec(N_freq=120, step_freq=1/12, freq_range=[20,20000])\n f_xy = np.array([100, 200, 400, 800, 1000, 2000, 5000, 10000, 16000])\n\n # initialize variables\n omega = 2 * np.pi * f\n omega_xy = 2 * np.pi * f_xy\n D_opt_LSA = np.ones([N, np.shape(f)[0]])\n P_LSA = np.zeros([np.shape(created_pal.xline)[0],np.shape(f)[0]], dtype=complex)\n\n # air attenuation\n alpha, c = AirAbsorptionCoefficient(f, T=293.15, p=101.325*10**(3), h=50)\n\n # directivity\n # if PALC_config.directivity not in ['Measured Loudspeaker Data']:\n # dire_meas_LSA = np.ones([np.shape(f)[0],np.shape(f)[0]])\n # dire_meas_deg = np.ones([np.shape(f)[0],np.shape(f)[0]])\n \n ######################### SPL CALCULATION #################################\n x_start, y_start, x_stop, y_stop, x_c_n, y_c_n, x_S, y_S = source_pos(gamma_n, SFP_config)\n\n for n in range(np.shape(f)[0]):\n G_LSA_vert = CalcGreenFunctions(created_pal.xline, created_pal.yline, np.array([0]), \\\n x_c_n, y_c_n, 0.82, SFP_config.directivity, \\\n SFP_config.Lambda_y, gamma_n, c, omega[n], 1, \\\n np.array(SFP_config.dir_meas[:,n]), \\\n np.array(SFP_config.dir_meas_deg[:,1]), \\\n alpha[n], f, n )\n\n P_LSA[:,n] = G_LSA_vert @ D_opt_LSA[:,n] # D_opt_LSA possibility to include driving functions\n p_SPL = 20 * np.log10(np.abs(P_LSA) / p0)\n Tech_res.update_tech_meas(p_SPL=p_SPL, f=f)\n return x_S, y_S",
"def _correct_p(self, f0, f1):\n return self.p * np.exp(self.dbeta * (f0 + f1) / 2)",
"def unifpf(self):\n if self._unifpf is None:\n self._unifpf = bb.algportfolio.build(self.algds)\n return self._unifpf",
"def testPeakLikelihoodFlux(self):\n # make mp: a flux measurer\n measControl = measAlg.PeakLikelihoodFluxControl()\n schema = afwTable.SourceTable.makeMinimalSchema()\n mp = measAlg.MeasureSourcesBuilder().addAlgorithm(measControl).build(schema)\n \n # make and measure a series of exposures containing just one star, approximately centered\n bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(100, 101))\n kernelWidth = 35\n var = 100\n fwhm = 3.0\n sigma = fwhm/FwhmPerSigma\n convolutionControl = afwMath.ConvolutionControl()\n psf = measAlg.SingleGaussianPsf(kernelWidth, kernelWidth, sigma)\n psfKernel = psf.getLocalKernel()\n psfImage = psf.computeKernelImage()\n sumPsfSq = numpy.sum(psfImage.getArray()**2)\n psfSqArr = psfImage.getArray()**2\n for flux in (1000, 10000):\n ctrInd = afwGeom.Point2I(50, 51)\n ctrPos = afwGeom.Point2D(ctrInd)\n\n kernelBBox = psfImage.getBBox(afwImage.PARENT)\n kernelBBox.shift(afwGeom.Extent2I(ctrInd))\n\n # compute predicted flux error\n unshMImage = makeFakeImage(bbox, [ctrPos], [flux], fwhm, var)\n\n # filter image by PSF\n unshFiltMImage = afwImage.MaskedImageF(unshMImage.getBBox(afwImage.PARENT))\n afwMath.convolve(unshFiltMImage, unshMImage, psfKernel, convolutionControl)\n \n # compute predicted flux = value of image at peak / sum(PSF^2)\n # this is a sanity check of the algorithm, as much as anything\n predFlux = unshFiltMImage.getImage().get(ctrInd[0], ctrInd[1]) / sumPsfSq\n self.assertLess(abs(flux - predFlux), flux * 0.01)\n \n # compute predicted flux error based on filtered pixels\n # = sqrt(value of filtered variance at peak / sum(PSF^2)^2)\n predFluxErr = math.sqrt(unshFiltMImage.getVariance().get(ctrInd[0], ctrInd[1])) / sumPsfSq\n\n # compute predicted flux error based on unfiltered pixels\n # = sqrt(sum(unfiltered variance * PSF^2)) / sum(PSF^2)\n # and compare to that derived from filtered pixels;\n # again, this is a test of the algorithm\n varView = afwImage.ImageF(unshMImage.getVariance(), kernelBBox)\n varArr = varView.getArray()\n unfiltPredFluxErr = math.sqrt(numpy.sum(varArr*psfSqArr)) / sumPsfSq\n self.assertLess(abs(unfiltPredFluxErr - predFluxErr), predFluxErr * 0.01)\n \n for fracOffset in (afwGeom.Extent2D(0, 0), afwGeom.Extent2D(0.2, -0.3)):\n adjCenter = ctrPos + fracOffset\n if fracOffset == (0, 0):\n maskedImage = unshMImage\n filteredImage = unshFiltMImage\n else:\n maskedImage = makeFakeImage(bbox, [adjCenter], [flux], fwhm, var)\n # filter image by PSF\n filteredImage = afwImage.MaskedImageF(maskedImage.getBBox(afwImage.PARENT))\n afwMath.convolve(filteredImage, maskedImage, psfKernel, convolutionControl)\n\n exposure = afwImage.makeExposure(filteredImage)\n exposure.setPsf(psf)\n \n table = afwTable.SourceTable.make(schema)\n source = table.makeRecord()\n mp.apply(source, exposure, afwGeom.Point2D(*adjCenter))\n measFlux = source.get(measControl.name)\n measFluxErr = source.get(measControl.name + \".err\")\n self.assertFalse(source.get(measControl.name + \".flags\"))\n self.assertLess(abs(measFlux - flux), flux * 0.003)\n \n self.assertLess(abs(measFluxErr - predFluxErr), predFluxErr * 0.2)\n\n # try nearby points and verify that the flux is smaller;\n # this checks that the sub-pixel shift is performed in the correct direction\n for dx in (-0.2, 0, 0.2):\n for dy in (-0.2, 0, 0.2):\n if dx == dy == 0:\n continue\n offsetCtr = afwGeom.Point2D(adjCenter[0] + dx, adjCenter[1] + dy)\n table = afwTable.SourceTable.make(schema)\n source = table.makeRecord()\n mp.apply(source, exposure, offsetCtr)\n offsetFlux = 
source.get(measControl.name)\n self.assertLess(offsetFlux, measFlux)\n \n # source so near edge of image that PSF does not overlap exposure should result in failure\n \n for edgePos in (\n (1, 50),\n (50, 1),\n (50, bbox.getHeight() - 1),\n (bbox.getWidth() - 1, 50),\n ):\n table = afwTable.SourceTable.make(schema)\n source = table.makeRecord()\n mp.apply(source, exposure, afwGeom.Point2D(*edgePos))\n self.assertTrue(source.get(measControl.name + \".flags\"))\n \n # no PSF should result in failure: flags set\n noPsfExposure = afwImage.ExposureF(filteredImage)\n table = afwTable.SourceTable.make(schema)\n source = table.makeRecord()\n mp.apply(source, noPsfExposure, afwGeom.Point2D(*adjCenter))\n self.assertTrue(source.get(measControl.name + \".flags\"))",
"def measureSJF(p):\r\n\treturn measureFCFS(msort(p))",
"def getProb(params, nstates, seqlen, trees):\n init, trans, scale = unfold_params(params, nstates=nstates)\n p = 0\n if scale == 0:\n return np.NINF\n new_trees = trees * scale\n return -forward(seqlen, normalize(trans), normalize(init), new_trees)",
"def _calc_pareto_front(self, *args, **kwargs):\n fname = f\"{self.fct.name}_PF.dat\"\n F = load_pareto_front_from_file(os.path.join(\"modact\", fname))\n if F is not None:\n return F*self.weights*-1",
"def _update_ps(self, es):\n if not self.is_initialized:\n self.initialize(es)\n if self._ps_updated_iteration == es.countiter:\n return\n z = es.isotropic_mean_shift\n if es.opts['CSA_clip_length_value'] is not None:\n vals = es.opts['CSA_clip_length_value']\n try: len(vals)\n except TypeError: vals = [-np.inf, vals]\n if vals[0] > 0 or vals[1] < 0:\n raise ValueError(\n \"\"\"value(s) for option 'CSA_clip_length_value' = %s\n not allowed\"\"\" % str(es.opts['CSA_clip_length_value']))\n min_len = es.N**0.5 + vals[0] * es.N / (es.N + 2)\n max_len = es.N**0.5 + vals[1] * es.N / (es.N + 2)\n act_len = _norm(z)\n new_len = Mh.minmax(act_len, min_len, max_len)\n if new_len != act_len:\n z *= new_len / act_len\n # z *= (es.N / sum(z**2))**0.5 # ==> sum(z**2) == es.N\n # z *= es.const.chiN / sum(z**2)**0.5\n self.ps = (1 - self.cs) * self.ps + _sqrt(self.cs * (2 - self.cs)) * z\n self._ps_updated_iteration = es.countiter",
"def test_ppt_distinguishability_four_bell_states():\n rho_1 = bell(0) * bell(0).conj().T\n rho_2 = bell(1) * bell(1).conj().T\n rho_3 = bell(2) * bell(2).conj().T\n rho_4 = bell(3) * bell(3).conj().T\n\n e_0, e_1 = basis(2, 0), basis(2, 1)\n e_00 = np.kron(e_0, e_0)\n e_11 = np.kron(e_1, e_1)\n\n eps = 0.5\n resource_state = np.sqrt((1 + eps) / 2) * e_00 + np.sqrt((1 - eps) / 2) * e_11\n resource_state = resource_state * resource_state.conj().T\n\n states = [\n np.kron(rho_1, resource_state),\n np.kron(rho_2, resource_state),\n np.kron(rho_3, resource_state),\n np.kron(rho_4, resource_state),\n ]\n probs = [1 / 4, 1 / 4, 1 / 4, 1 / 4]\n\n exp_res = 1 / 2 * (1 + np.sqrt(1 - eps**2))\n\n primal_res = ppt_distinguishability(states, probs=probs, dist_method=\"min-error\", strategy=True)\n dual_res = ppt_distinguishability(states, probs=probs, dist_method=\"min-error\", strategy=False)\n\n np.testing.assert_equal(np.isclose(primal_res, exp_res, atol=0.001), True)\n np.testing.assert_equal(np.isclose(dual_res, exp_res, atol=0.001), True)",
"def PmfCorrect(efficacy, difficulties):\n pmf0 = thinkbayes2.Pmf([0])\n\n ps = [ProbCorrect(efficacy, difficulty) for difficulty in difficulties]\n pmfs = [BinaryPmf(p) for p in ps]\n dist = sum(pmfs, pmf0)\n return dist",
"def compute_prob(client_as, \r\n mal_guard_fp,\r\n mal_guard_bw,\r\n network_state,\r\n pfi):\r\n \r\n # unpack network state\r\n fp_to_as = {k:v[0] for k,v in network_state.items()}\r\n fp_to_bw = {k:v[1] for k,v in network_state.items()}\r\n\r\n safe_guard_fps = get_usable_guards(client_as, fp_to_as, pfi)\r\n\r\n if len(safe_guard_fps) == 0:\r\n bw_sum = sum(fp_to_bw.values())\r\n prob = mal_guard_bw / bw_sum\r\n\r\n else:\r\n if mal_guard_fp not in safe_guard_fps:\r\n prob = 0\r\n\r\n else:\r\n bw_sum = sum(map(lambda x: fp_to_bw[x], safe_guard_fps))\r\n prob = mal_guard_bw / bw_sum\r\n\r\n return prob",
"def match_psf_fits(fp_img, fp_psf, fp_psfto, fp_img_out, fp_psf_out, fp_psk_out, overwrite=True, towrite_psk=False):\n\n\timg = fits.getdata(fp_img)\n\tpsf = fits.getdata(fp_psf)\n\tpsfto = fits.getdata(fp_psfto)\n\n\t# sanity check -- normalization of psf\n\tfor a in [psf, psfto]:\n\t\tif np.absolute(np.sum(a) - 1.) > 3.e-4:\n\t\t\traise ValueError(\"[matchpsf] input psf is not normalized with sum\".format('%.5f'%np.sum(a)))\n\n\timg_out, psf_out, psk_out, = match_psf(img, psf, psfto)\n\n\treplace_img_in_fits(fn_from=fp_img, fn_to=fp_img_out, img=img_out, comment=\"PSF matched by ALS\", overwrite=overwrite)\n\n\tfits.PrimaryHDU(psf_out).writeto(fp_psf_out, overwrite=overwrite)\n\tif towrite_psk:\n\t\tfits.PrimaryHDU(psk_out).writeto(fp_psk_out, overwrite=overwrite)",
"def calculate_f_p(genes, gene_abundance_file, gene_molecular_weight_file):\n gene_abundance = pd.read_csv(gene_abundance_file, index_col=0)\n gene_molecular_weight = json_load(gene_molecular_weight_file)\n enzy_abundance = 0\n pro_abundance = 0\n for gene_i in gene_abundance.index:\n if gene_i in gene_molecular_weight.keys():\n abundance = gene_abundance.loc[gene_i, 'abundance'] * \\\n gene_molecular_weight[gene_i]/1000\n pro_abundance += abundance\n if gene_i in genes.index:\n enzy_abundance += abundance\n f = enzy_abundance/pro_abundance\n return f",
"def calc_points_park(self):\n be = ['_'] * 8\n be += self.b[ 0: 5]\n be += ['_'] * 2\n be += self.b[ 5:10]\n be += ['_'] * 2\n be += self.b[10:15]\n be += ['_'] * 2\n be += self.b[15:20]\n be += ['_'] * 8\n cnt_PG = 0\n cnt_P = 0\n points = 0\n vptab_park = (0, 2, 4, 7, 11)\n for i in range(8, 34):\n if be[i] == 'P' or be[i] == 'G':\n cnt_PG += 1\n if be[i] == 'P':\n cnt_P += 1\n neigh_tower_office = 0\n if be[i - 1] == 'T' or be[i - 1] == 'O':\n neigh_tower_office += 1\n if be[i + 1] == 'T' or be[i + 1] == 'O':\n neigh_tower_office += 1\n if be[i - 7] == 'T' or be[i - 7] == 'O':\n neigh_tower_office += 1\n if be[i + 7] == 'T' or be[i + 7] == 'O':\n neigh_tower_office += 1\n points += vptab_park[neigh_tower_office]\n if 'park' in args.exp:\n points += cnt_PG\n if 'repr' in args.exp:\n recycle_energy = max(self.energy - self.energy_used, 0)\n points += recycle_energy\n else:\n penalty_energy = max(self.energy - self.energy_used - cnt_P, 0)\n points -= penalty_energy\n return points",
"def pc_nproduced_avg(self):\n return _spacegrant_swig.invert_bit_sptr_pc_nproduced_avg(self)",
"def calc_pucker_torsion(self):\n return self.calc_torsion(\"pucker\")",
"def calculateP(SD, numDiff):\n return numDiff/SD",
"def calculateP(SD, numDiff):\n return numDiff/SD",
"def no_match():\n S1=Spectrum.Spectrum()\n S1.add_peak(50.7,234)\n S1.add_peak(54.6,585)\n S1.add_peak(60.7,773)\n S1.add_peak(65.6,387)\n S1.add_peak(87.7,546)\n S1.add_peak(104.6,598)\n S1.pep_mass=100\n S1.euclidean_scale()\n\n S2=Spectrum.Spectrum()\n S2.add_peak(50.2,234)\n S2.add_peak(53.8,585)\n S2.add_peak(61.3,773)\n S2.add_peak(66.2,387)\n S2.add_peak(88.1,546)\n S2.add_peak(103.9,598)\n S2.pep_mass=100\n S2.euclidean_scale()\n\n score,peaks=similarity.cosine_score_max(S1,S2)\n assert peaks==0, \"Incorrect number of peaks matched with greedy method\"\n assert score==0, \"Incorrect score with greedy method\"\n \n\n score,peaks=similarity.cosine_score_greedy(S1,S2)\n assert peaks==0, \"Incorrect number of peaks matched with maximum weighted method\"\n assert score==0, \"Incorrect score with maximum weighted method\"",
"def matchProfilePeak(pkp, p, w): #{\n vrbMsg(5, 'matchProfilePeak() pkp = ' + str(pkp) + \n ', p = ' + str(p) + ',w = ' + str(w))\n pi = None\n off = 2 * w\n p0 = p - w\n p1 = p + w\n for i in range(0, len(pkp)): #{\n if ((pkp[i] > p0) and (pkp[i] < p1)): #{\n o2 = abs(pkp[i] - p)\n if(o2 < off): #{\n pi = i\n off = o2\n #}\n break\n #}\n #}\n vrbMsg(5, 'matchProfilePeak() pi = ' + str(pi))\n return pi",
"def ppd(self):\n return math.sqrt(np.dot(self.v, self.v) / np.dot(self.w, self.w) )",
"def test_psnr_with_two_completely_different_sets(self):\n low = np.zeros((10, 500, 500, 1), dtype=np.uint8)\n high = np.ones((10, 500, 500, 1), dtype=np.uint8) * 255\n\n avg_psnr = np.array(psnr(high, low)).mean()\n self.assertEqual(avg_psnr, 0.0)",
"def extract_using_paf(comps, data_covar, noise_covar=None, verbose=False):\n\n new_comps = np.copy(comps)\n new_props = np.zeros(new_comps.shape[0])\n\n for step in range(PAF_OPTS['max_iter']):\n\n old_comps, old_props = new_comps, new_props\n\n new_comps, new_props = _paf_step(\n old_comps,\n data_covar,\n noise_covar=noise_covar)\n\n err = np.mean((new_props - old_props)**2)\n\n if verbose:\n print(\"Iteration {} error: {}\".format(step, err))\n\n if err < PAF_OPTS['tol']:\n break\n\n return new_comps",
"def calEachCrossflow2peak():\n \n crossFlow = pd.read_csv('Data_crossflow.csv', index_col = 'Unnamed: 0')\n peakCross = crossFlow['Node2']\n crossFlowPeakFactor = peakCross/0.8\n \n peakCross2 = crossFlow['Node6']\n crossFlowPeakFactor2 = peakCross2/0.8\n #original_factor = peakCross/0.8\n #need to judge the sign of lateral flow according to CTF rule!!\n gapsToFlip = [2,4,6,7,9,11,13,14,16,18,20,21] #gaps in y direction\n gapsToFlipIndex = [x - 1 for x in gapsToFlip]\n for index in gapsToFlipIndex:\n crossFlowPeakFactor[index] = -crossFlowPeakFactor[index] \n crossFlowPeakFactor2[index] = -crossFlowPeakFactor2[index]\n \n return crossFlowPeakFactor, crossFlowPeakFactor2",
"def psu2ppt(psu):\n\n a = [0.008, -0.1692, 25.3851, 14.0941, -7.0261, 2.7081]\n return (a[1] + a[2] * psu ** 0.5 + a[3] * psu + a[4] * psu ** 1.5 + a[5] *\n psu ** 2 + a[6] * psu ** 2.5)",
"def Pp(nccd):\n return (128.1-56.9) * (nccd - 1) / (6-1) + 56.9",
"def _calc_pval(self):\n t = self.beta / self.stderr_beta\n return (2. * (1. - stats.t.cdf(np.abs(t), self.n - 2)))[0]",
"def get_f_h_gas_comp_out(p: float, s: float) -> float:\n\n return - 1.869892835947070 * 10 ** (-1) * p ** 4 \\\n + 8.223224182177200 * 10 ** (-1) * p ** 3 \\\n + 4.124595239531860 * p ** 2 \\\n - 8.346302788803210 * 10 * p \\\n - 1.016388214044490 * 10 ** 2 * s ** 4 \\\n + 8.652428629143880 * 10 ** 2 * s ** 3 \\\n - 2.574830800631310 * 10 ** 3 * s ** 2 \\\n + 3.462049327009730 * 10 ** 3 * s \\\n + 9.209837906396910 * 10 ** (-1) * p ** 3 * s \\\n - 5.163305566700450 * 10 ** (-1) * p ** 2 * s ** 2 \\\n + 4.076727767130210 * p * s ** 3 \\\n - 8.967168786520070 * p ** 2 * s \\\n - 2.062021416757910 * 10 * p * s ** 2 \\\n + 9.510257675728610 * 10 * p * s \\\n - 1.476914346214130 * 10 ** 3",
"def ppm_to_point(ppm, procs, proc2s):\n \n # It seems that F1 is related to the Y axis, while F2 is related to the X axis\n \n begin = (float(proc2s[\"OFFSET\"]), float(procs[\"OFFSET\"]))\n # End is begin-sw_p/sf, so step is (end-begin)/si, which simplifies to\n # (-sw_p/sf+1)/si\n step = [(-float(p[\"SW_p\"])/float(p[\"SF\"]))/float(p[\"SI\"]) \n for p in [proc2s, procs] ]\n \n return [(ppm[i]-begin[i])/step[i] for i in (0,1)]",
"def _predict_p(self, f):\n return self.p * np.exp(self.dbeta * f)",
"def get_proteome_correct_percentages(prots_filtered_feathers, outpath, length_filter_pid=None,\n copynum_scale=False, copynum_df=None,\n force_rerun=False):\n if ssbio.utils.force_rerun(flag=force_rerun, outfile=outpath):\n prot_tracker = defaultdict(int)\n big_strain_counts_df = pd.DataFrame()\n first = True\n for feather in prots_filtered_feathers:\n loaded = load_feather(protein_feather=feather, length_filter_pid=length_filter_pid,\n copynum_scale=copynum_scale,\n copynum_df=copynum_df)\n\n if first:\n big_strain_counts_df = pd.DataFrame(columns=loaded.columns)\n first = False\n tmp_df = pd.DataFrame(columns=loaded.columns)\n for strain in loaded.columns:\n prot_tracker[strain] += 1\n totals = list(filter(lambda x: x.endswith('total'), loaded[strain].index))\n for t in totals:\n counts = t.rsplit('_', 1)[0]\n aa_counts = list(\n filter(lambda x: (x.startswith(counts) and x not in totals), loaded[strain].index))\n for aa_count in aa_counts:\n tmp_df.at[aa_count.replace('count', '%'), strain] = loaded[strain][aa_count] / \\\n loaded[strain][t]\n big_strain_counts_df = big_strain_counts_df.add(tmp_df, fill_value=0)\n\n for c, total in prot_tracker.items():\n big_strain_counts_df.loc[:, c] /= total\n\n if len(big_strain_counts_df) > 0:\n big_strain_counts_df.astype(float).reset_index().to_feather(outpath)\n return big_strain_counts_df\n else:\n return pd.read_feather(outpath).set_index('index')",
"def _get_lip_best(self) -> float:\n pass",
"def prepare_data(f, p):\n ff = f.copy()\n pp = p.copy()\n p = p[f > 1.0]\n f = f[f > 1.0]\n\n smoo = int(1.0 / (f[1]-f[0]))\n p = rebin(p, smoo)\n f = rebin(f, smoo)\n return f, p, ff, pp, smoo",
"def p2f (p):\n #return 11000**((p+1)/2)\n #return (p+1)*11000\n return (p+1)*5500",
"def propabilityLVQ(self):\n self.labels = self.labelingLVQ()\n for i in range(self.labels.shape[0]):\n for j in range(self.labels.shape[1]):\n for k in range(self.labels.shape[2]):\n total = sum(self.labels[i, j, k] for i in range(self.labels.shape[0]))\n if total == 0. :\n continue\n else:\n self.propa[i, j, k] = self.labels[i, j, k] / total\n self.propa[i, j, k] = round(self.propa[i, j, k], 2)\n return self.propa",
"def pc_throughput_avg(self):\n return _TestA_swig.cleanslate_sptr_pc_throughput_avg(self)",
"def preprocess_on_cluster(sff_fps, log_fp, fasta_fp=None, out_fp=\"/tmp/\",\r\n squeeze=False, verbose=False,\r\n primer=STANDARD_BACTERIAL_PRIMER):\r\n cmd = \"denoiser_preprocess.py -i %s -l %s -o %s\" %\\\r\n (\",\".join(sff_fps), log_fp, out_fp)\r\n if (fasta_fp):\r\n cmd += \" -f %s\" % fasta_fp\r\n if(squeeze):\r\n cmd += \" -s\"\r\n if verbose:\r\n cmd += \" -v\"\r\n if primer:\r\n cmd += \" -p %s\" % primer\r\n\r\n submit_jobs([cmd], \"pp_\" + make_tmp_name(6))\r\n\r\n wait_for_file(out_fp + \"/prefix_mapping.txt\", 10)",
"def test_dpss(self):\n v = np.load\n\n datafile = os.path.join(os.path.dirname(__file__), 'data', 'dpss.npz')\n v = np.load(datafile)['v']\n\n v2, lamb, theta = dpss(512, 2.5, 2)\n # No NaNs are supposed to be in the output.\n self.assertEqual(np.isnan(v2).any(), False)\n self.assertEqual(np.isnan(lamb).any(), False)\n self.assertEqual(np.isnan(theta).any(), False)\n # Taper 1, normalize for precision\n np.testing.assert_almost_equal(v2[:, 0] / v[:, 0], v[:, 0] / v[:, 0])\n # Taper 2, normalize for precision\n np.testing.assert_almost_equal(v2[:, 1] / v[:, 1], v[:, 1] / v[:, 1])\n\n # Do the same but with spline interpolation.\n v3, lamb2, thetha2 = dpss(512, 2.5, 2, npts_max=400)\n # Test both tapers. They are not exactly equal therefore only two\n # digits are compared.\n np.testing.assert_almost_equal(v3 / v3, v2 / v3, 2)",
"def pc_nproduced_avg(self):\n return _TestA_swig.cleanslate_sptr_pc_nproduced_avg(self)",
"def ppf(self,x):\n if self.base == 'natural':\n ppfValue = math.exp((self.upperBound-self.lowerBound)*x + self.lowerBound)\n else:\n ppfValue = 10.**((self.upperBound-self.lowerBound)*x + self.lowerBound)\n return ppfValue",
"def generatePhasingScore(options,phase,cycle):\n score,readcount,readseq=readDataForPhasingScoreComputation(options,phase)\n phased_loci_filename=options.output_directory_per_run+\"/\"+options.input_filename+\"_\"+str(phase)+\"_\"+str(cycle)+\".positive_phase_loci\"\n final_phase_loci=options.output_directory_per_run+\"/\"+options.input_filename+\"_\"+str(phase)+\"_\"+str(cycle)+\".phasing_score_phase_loci\"\n fhr=open(phased_loci_filename,\"r\")\n out4=open(final_phase_loci,\"w\")\n for line in fhr:\n chromosome,ss,ee=line.strip().split()\n ss=int(ss)\n ee=int(ee)\n #correct=list(range(ss,ee+1,phase))\n phasing_score_filename=options.output_directory_per_run+\"/\"+str(phase)+\"_\"+str(chromosome)+\"_\"+str(ss)+\"_\"+str(ee)+\".phasing_score\"\n abundance_score_filename=options.output_directory_per_run+\"/\"+str(phase)+\"_\"+str(chromosome)+\"_\"+str(ss)+\"_\"+str(ee)+\".abundance\"\n out=open(phasing_score_filename,\"w\")\n out2=open(abundance_score_filename,\"w\")\n score_count={}\n for site in range(ss,ee+1):\n start=site-(phase*4)\n end=site+(phase*5)-1\n max_within_site,max_within_count,all_scores=0,0,0\n for cor in range(start,end+1):\n if cor not in score[chromosome]:continue\n all_scores+=score[chromosome][cor]\n for i in readcount[chromosome][cor]:\n if max_within_count<readcount[chromosome][cor][i]:\n max_within_site=cor\n max_within_count=readcount[chromosome][cor][i]\n all_scores-=max_within_count\n P,k=0,0\n s=start\n while s<end:\n if s not in score[chromosome]:\n s+=phase\n continue\n if score[chromosome][s]!=0:\n P+=score[chromosome][s]\n k+=1\n if s == max_within_site:\n P-=max_within_count \n s+=phase\n U=all_scores-P\n \n #if U<0: continue\n if k>=3:\n #print(P,U,k)\n phas_score=math.log((1+(10*(P/(1+U))))**(k-2))\n \"\"\"if phas_score>max and site in correct:\n max=phas_score\"\"\"\n else:\n phas_score=0\n out.write(str(site)+\"\\t\"+str(phas_score)+\"\\n\")\n out4.write(chromosome+\"\\t\"+str(site)+\"\\t\"+str(phas_score)+\"\\n\")\n if chromosome not in score_count:\n score_count[chromosome]={}\n if site not in score_count[chromosome]:\n score_count[chromosome][site]=phas_score\n if site in readcount[chromosome] and '+' in readcount[chromosome][site] and readcount[chromosome][site]['+']!=0:\n out2.write(str(site)+\"\\t\"+str(readcount[chromosome][site]['+'])+\"\\n\")\n if site in readcount[chromosome] and '-' in readcount[chromosome][site] and readcount[chromosome][site]['-']!=0:\n out2.write(str(site)+\"\\t-\"+str(readcount[chromosome][site]['-'])+\"\\n\")\n out.close()\n out2.close()\n \n #out4.write(chromosome+\"\\t\"+str(ss)+\"\\t\"+str(ee)+\"\\t\"+str(phas_score)+\"\\n\")\n out4.close()",
"def readPSF(self,phys,psfname):\r\n PSFReader.PSFReader(self.checkPath(psfname)).read(phys.myPSF)\r\n phys.build()",
"def smdape(self) -> float:\n return float(np.median(2.0 * self._ae() / ((np.abs(self.true) + np.abs(self.predicted)) + EPS)))"
] | [
"0.58542764",
"0.58267516",
"0.5633884",
"0.5620691",
"0.5568786",
"0.5546477",
"0.5538438",
"0.5512286",
"0.5491357",
"0.5486278",
"0.53827614",
"0.535951",
"0.5333681",
"0.53270435",
"0.5324692",
"0.5322697",
"0.53162724",
"0.5314971",
"0.5281472",
"0.5278054",
"0.5277989",
"0.52655154",
"0.5253501",
"0.5233621",
"0.5231598",
"0.5224775",
"0.52214766",
"0.5193918",
"0.5190423",
"0.51874524",
"0.5175658",
"0.517327",
"0.5150704",
"0.51468617",
"0.5143615",
"0.514303",
"0.5138657",
"0.51355344",
"0.51355344",
"0.51057154",
"0.50999206",
"0.5094778",
"0.5087201",
"0.5080451",
"0.5078265",
"0.50778383",
"0.50713915",
"0.5052467",
"0.5052378",
"0.50476265",
"0.50186926",
"0.50163686",
"0.501333",
"0.50119007",
"0.50067836",
"0.50026935",
"0.5000089",
"0.4996954",
"0.499042",
"0.4986786",
"0.49864537",
"0.49813402",
"0.49810636",
"0.49800846",
"0.49743754",
"0.49731752",
"0.4972408",
"0.49714682",
"0.49689573",
"0.49681765",
"0.49667838",
"0.49665508",
"0.49657187",
"0.49639708",
"0.49639708",
"0.4962866",
"0.4954891",
"0.49535674",
"0.49523273",
"0.49479392",
"0.49466893",
"0.49437907",
"0.49401516",
"0.49326015",
"0.49314973",
"0.49299794",
"0.49288756",
"0.49286813",
"0.49264663",
"0.49261013",
"0.49258533",
"0.49238822",
"0.4923495",
"0.49207187",
"0.49183986",
"0.49082077",
"0.48994493",
"0.48991847",
"0.4896025",
"0.48950934"
] | 0.60214907 | 0 |
Compute the PSF and the Strehl ratio | def compute_PSF(self, coef, crop=True):
phase = np.dot(self.RBF_mat, coef) + self.defocus
pupil_function = self.pupil_mask * np.exp(2 * np.pi * 1j * phase)
image = (np.abs(fftshift(fft2(pupil_function))))**2
try:
image /= self.PEAK
except AttributeError:
# If self.PEAK is not defined, self.compute_PSF will compute the peak
pass
strehl = np.max(image)
if crop:
image = image[self.minPix:self.maxPix, self.minPix:self.maxPix]
else:
pass
return image, strehl | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_percentage_sf_votes(self):\n\n votes_f = self.get_num_f_votes()\n votes_sf = self.get_num_sf_votes()\n\n # avoid dividing by zero\n if votes_f + votes_sf == 0:\n return 0\n else:\n ratio = float(votes_sf)/(votes_f + votes_sf)\n return round(ratio * 100, 1)",
"def __calc_s(self, df):\n df.loc[:, \"avg_num_drivers\"] = df.idle + df.incoming\n s = df.total / df.avg_num_drivers # df.total := amount of demand\n s[s > 1] = 1\n s[np.isnan(s)] = 0.0001\n s[np.isinf(s)] = 1\n\n df.loc[:, \"prob_of_s\"] = s\n df = df[[\"zone_id\", \"prob_of_s\"]]\n return df",
"def pss(self):\n return (self.table[0, 0] * self.table[1, 1] - self.table[0, 1] * self.table[1, 0]) / \\\n ((self.table[0, 0] + self.table[1, 0]) * (self.table[0, 1] + self.table[1, 1]))",
"def sharpe_ratio(adr,sddr,sf=252,rfr=0.0):\n rfr=((1.0 + rfr) ** (1/sf)) - 1 # Daily risk free return. This is the shortcut to calculate daily (sf=252) risk free return\n return sf**(1.0/2)*(adr-rfr)/sddr",
"def calculateP(SD, numDiff):\n return numDiff/SD",
"def calculateP(SD, numDiff):\n return numDiff/SD",
"def prf(stats):\n if stats['pred'] == 0:\n return 0, 0, 0\n p = stats['corr']/stats['pred']\n r = stats['corr']/stats['gold']\n if p > 0 and r > 0:\n f = 2*p*r/(p+r)\n else:\n f = 0\n return p, r, f",
"def rmspe(self) -> float:\n return float(np.sqrt(np.mean(np.square(((self.true - self.predicted) / self.true)), axis=0)))",
"def calculateSaleReturn(S,R,F,T):\n if (T > S):\n return 0\n\n if F == 100:\n return R*T/S\n\n return float(R) * ( 1.0 - math.pow(float(S-T)/float(S) , (100.0/float(F))))",
"def get_percentage_f_votes(self):\n\n votes_f = self.get_num_f_votes()\n votes_sf = self.get_num_sf_votes()\n\n # avoid dividing by zero\n if votes_f + votes_sf == 0:\n return 0\n else:\n ratio = float(votes_f)/(votes_f + votes_sf)\n return round(ratio * 100, 1)",
"def sharpe_ratio(r1, r2, rf, o1, o2, cov):\n def sr(x):\n w1 = x[0]\n w2 = 1 - w1\n\n Rp = w1 * r1 + w2 * r2\n STDEVp = math.sqrt(portfolio_variance(o1, o2, cov)(x))\n R = (Rp - rf) / STDEVp\n return R\n return sr",
"def phosfracs(h,ks):\n k1p,k2p,k3p = ks\n h3po4 = h*h*h\n h2po4 = k1p*h*h\n hpo4 = k1p*k2p*h\n po4 = k1p*k2p*k3p\n denom = h3po4 + h2po4 + hpo4 + po4\n h3po4 /= denom\n h2po4 /= denom\n hpo4 /= denom\n po4 /= denom\n return h3po4,h2po4,hpo4,po4",
"def rF(count, total):\n\treturn float(count)/float(total)",
"def get_f_score(self):\n return self.get_g_score() + self.get_h_score()",
"def calculate(self):\n\n gt = self.ground_truth.flatten()\n seg = self.segmentation.flatten()\n\n n = gt.size\n mean_gt = gt.mean()\n mean_seg = seg.mean()\n mean = (mean_gt + mean_seg) / 2\n\n m = (gt + seg) / 2\n ssw = np.power(gt - m, 2).sum() + np.power(seg - m, 2).sum()\n ssb = np.power(m - mean, 2).sum()\n\n ssw /= n\n ssb = ssb / (n - 1) * 2\n\n return (ssb - ssw) / (ssb + ssw)",
"def calculateSaleReturn(S,R,F,T):\n if (T > S):\n return 0\n\n if F == 100:\n return int(R- R*T/S)\n\n return int(R * ( 1.0 - math.pow(float(S-T)/float(S) , (100.0/F))))",
"def sharpe_ratio(self, r_f):\n return (\n self.cumulative_returns().last('1D').iat[0] - r_f\n ) / self.cumulative_returns().std()",
"def _compute_f(self, p, dh, dv):\n return dh / (self.beta * p * dv)",
"def eval_fis(self,fis):\n #res = 0.0\n #for cl_state in self.classes:\n # res += cl_state.eval_fis(fis)\n #print \"=>\",res\n #return 1.0/res\n try:\n correct,count = self.quality_fis(fis)\n except Exception as err:\n print err\n correct = 0\n return correct",
"def pofd(self):\n return self.table[0, 1] / (self.table[0, 1] + self.table[1, 1])",
"def fpr(self):\n return float(self.fp) / (self.fp + self.tn) if self.tn != 0 else 1",
"def get_sharpe_ratio(allocs, prices):\n\tport_val = get_portfolio_value(prices, allocs, start_val=1.0)\n\tsharpe_ratio = get_portfolio_stats(port_val, daily_rf=0.0, samples_per_year=252)[3]\n\treturn -sharpe_ratio",
"def calculateR(sapienses: list) -> float:\n r = 0\n for i in sapienses:\n r = r + i.numberInfected\n r=r/I0\n r = r*S/(S+R+D)\n return r",
"def P(lag):\n N = len(SP)\n ratios = SP[lag:N]/SP[0:N-lag]\n P = 100.*(ratios-1.)\n return P",
"def pe_ratio(self):\n try:\n return self.price / self.dividend_yield\n except ZeroDivisionError:\n return 0.0",
"def calc_max_frac_diff_between_two_psf(fp_psf1, fp_psf2):\n\tnx, ny = 43, 43\n\tpsf1 = fits.getdata(fp_psf1) \n\tpsf2 = fits.getdata(fp_psf2)\n\n\tpsf1 = normalize_kernel(psf1)\n\tpsf2 = normalize_kernel(psf2)\n\n\tpsf1 = pad_edge_to_shape(psf1, nx=nx, ny=ny)\n\tpsf2 = pad_edge_to_shape(psf2, nx=nx, ny=ny)\n\n\tmax_orig = max([psf1.max(), psf2.max()])\n\tmax_diff = np.max(np.absolute(psf1-psf2))\n\n\tfrac_diff = max_diff/max_orig\n\treturn frac_diff",
"def get_f_s_gas(p: float, h: float) -> float:\n return 5.823109493752840 * 10 ** (-2) * p ** 4 \\\n - 3.309666523931270 * 10 ** (-1) * p ** 3 \\\n + 7.700179914440890 * 10 ** (-1) * p ** 2 \\\n - 1.311726004718660 * p \\\n + 1.521486605815750 * 10 ** (-9) * h ** 4 \\\n - 2.703698863404160 * 10 ** (-6) * h ** 3 \\\n + 1.793443775071770 * 10 ** (-3) * h ** 2 \\\n - 5.227303746767450 * 10 ** (-1) * h \\\n + 1.100368875131490 * 10 ** (-4) * p ** 3 * h \\\n + 5.076769807083600 * 10 ** (-7) * p ** 2 * h ** 2 \\\n + 1.202580329499520 * 10 ** (-8) * p * h ** 3 \\\n - 7.278049214744230 * 10 ** (-4) * p ** 2 * h \\\n - 1.449198550965620 * 10 ** (-5) * p * h ** 2 \\\n + 5.716086851760640 * 10 ** (-3) * p * h \\\n + 5.818448621582900 * 10",
"def findVWSP(self):\n num=0\n den=0\n ban=False\n for el in self.TL:\n if datetime.fromtimestamp(el.TS) > (datetime.now()-timedelta(minutes = 15)):\n ban=True\n num+=el.Price * el.NoSh\n den+= el.NoSh \n if ban:\n if den!=0:\n return num/den\n else:\n raise BaseException(\"Oops! the vwsp cannot be computed.\")\n else:\n return 0",
"def calP(self):\n N = len(self.listOfParticles)\n m = self.listOfParticles[0].m\n vsum = 0\n for particle in self.listOfParticles:\n vsum += particle.V.len()\n A = np.pi*self.R**2\n F = 0.5 * A * (2*self.R) * m * N * vsum**2\n return F",
"def test_dpss(self):\n v = np.load\n\n datafile = os.path.join(os.path.dirname(__file__), 'data', 'dpss.npz')\n v = np.load(datafile)['v']\n\n v2, lamb, theta = dpss(512, 2.5, 2)\n # No NaNs are supposed to be in the output.\n self.assertEqual(np.isnan(v2).any(), False)\n self.assertEqual(np.isnan(lamb).any(), False)\n self.assertEqual(np.isnan(theta).any(), False)\n # Taper 1, normalize for precision\n np.testing.assert_almost_equal(v2[:, 0] / v[:, 0], v[:, 0] / v[:, 0])\n # Taper 2, normalize for precision\n np.testing.assert_almost_equal(v2[:, 1] / v[:, 1], v[:, 1] / v[:, 1])\n\n # Do the same but with spline interpolation.\n v3, lamb2, thetha2 = dpss(512, 2.5, 2, npts_max=400)\n # Test both tapers. They are not exactly equal therefore only two\n # digits are compared.\n np.testing.assert_almost_equal(v3 / v3, v2 / v3, 2)",
"def calculateZScoreAndPValue(m1, s1, n1, m2, s2, n2): \n z_val = (m1 - m2) / np.sqrt(float( ((s1**2)/float(n1)) + ((s2**2)/float(n2))) )\n cdf_one_sided = scipy.stats.norm.cdf(z_val)\n p_val = 1 - cdf_one_sided \n return z_val, p_val",
"def calcSFP(gamma_tilt_deg, created_pal, SFP_config, Tech_res):\n # general\n gamma_n = gamma_tilt_deg / 180 * np.pi\n N = SFP_config.N\n \n ########################## SIMULATION SETUP ###############################\n # reference pressure\n p0 = 2 * 10**(-5)\n # frequencies\n f = get_freq_vec(N_freq=120, step_freq=1/12, freq_range=[20,20000])\n f_xy = np.array([100, 200, 400, 800, 1000, 2000, 5000, 10000, 16000])\n\n # initialize variables\n omega = 2 * np.pi * f\n omega_xy = 2 * np.pi * f_xy\n D_opt_LSA = np.ones([N, np.shape(f)[0]])\n P_LSA = np.zeros([np.shape(created_pal.xline)[0],np.shape(f)[0]], dtype=complex)\n\n # air attenuation\n alpha, c = AirAbsorptionCoefficient(f, T=293.15, p=101.325*10**(3), h=50)\n\n # directivity\n # if PALC_config.directivity not in ['Measured Loudspeaker Data']:\n # dire_meas_LSA = np.ones([np.shape(f)[0],np.shape(f)[0]])\n # dire_meas_deg = np.ones([np.shape(f)[0],np.shape(f)[0]])\n \n ######################### SPL CALCULATION #################################\n x_start, y_start, x_stop, y_stop, x_c_n, y_c_n, x_S, y_S = source_pos(gamma_n, SFP_config)\n\n for n in range(np.shape(f)[0]):\n G_LSA_vert = CalcGreenFunctions(created_pal.xline, created_pal.yline, np.array([0]), \\\n x_c_n, y_c_n, 0.82, SFP_config.directivity, \\\n SFP_config.Lambda_y, gamma_n, c, omega[n], 1, \\\n np.array(SFP_config.dir_meas[:,n]), \\\n np.array(SFP_config.dir_meas_deg[:,1]), \\\n alpha[n], f, n )\n\n P_LSA[:,n] = G_LSA_vert @ D_opt_LSA[:,n] # D_opt_LSA possibility to include driving functions\n p_SPL = 20 * np.log10(np.abs(P_LSA) / p0)\n Tech_res.update_tech_meas(p_SPL=p_SPL, f=f)\n return x_S, y_S",
"def ps(image):\n\timage = image.astype(float)\n\tps_img = abs(pow(fft2(image), 2))\n\treturn ps_img",
"def getFPSA1(ChargeSA):\n temp=0.0\n for i in ChargeSA:\n temp=temp+i[2]\n if temp == 0.0:\n return 0.0\n else:\n return getPPSA1(ChargeSA)/temp",
"def pv(rate, n_years):\n return 1 / fv(rate, n_years)",
"def _rsq(self):\n return self._ss_reg / self._ss_tot",
"def getFPSA2(ChargeSA):\n temp=0.0\n for i in ChargeSA:\n temp=temp+i[2]\n if temp == 0.0:\n return 0.0\n else:\n return getPPSA2(ChargeSA)/temp",
"def _calculate_snr_spread(self):\n\n dmSpacing, percentage = 100, 0\n while percentage < 0.5: \n x = np.linspace(self.centerDm - dmSpacing, self.centerDm + dmSpacing, 500)\n y = np.array([self.effective_snr(self.effective_width(self.pulseWidth, self.centerDm - dm_val, self.bandwidth, self.freq), self.pulseWidth * 20) for dm_val in x])\n y = (y / (np.max(y) * 1.0)) if np.max(y) > 0 else y\n percentage = np.size(np.where(y > 0)) / 1000.0\n dmSpacing = dmSpacing*0.6\n \n return x, y",
"def fs2ps2D(px, s):\n\t\tsfun = psarclength(px)\t\n\t\treturn sfun-s",
"def Fpond(presHead, Ks, thetaSat, thetaInit, rainfallRate):\n numerator = np.absolute(presHead)*Ks*(thetaSat - thetaInit)\n denominator = rainfallRate - Ks\n\n Fp = numerator/denominator\n return Fp",
"def golden_ratio():\n print((1+math.sqrt(5))/2)",
"def getTotalBusinessPercentForSalePercents(sc_sp:float, lc_sp:float):\n sc_tbp,lc_tbp,actualRainPercent = 0.0,0.0,0.0\n for i in range(int(Problem2.TOTAL_WEEKS)):\n isRainy = Problem2.eventOccurred(Problem2.RAIN_PERCENT)\n if isRainy:\n actualRainPercent += 1\n sc_hasSale = Problem2.eventOccurred(sc_sp)\n lc_hasSale = Problem2.eventOccurred(lc_sp)\n sc_ns_bp, lc_ns_bp, ss_bp = Problem2.setBusinessPercents(isRainy)\n sc_bp, lc_bp = Problem2.getBusinessPercentBySales(sc_hasSale, lc_hasSale, sc_ns_bp, lc_ns_bp, ss_bp)\n sc_tbp += sc_bp\n lc_tbp += lc_bp\n\n sc_tbp /= Problem2.TOTAL_WEEKS\n lc_tbp /= Problem2.TOTAL_WEEKS\n actualRainPercent /= Problem2.TOTAL_WEEKS\n return sc_tbp, lc_tbp, actualRainPercent",
"def computeFScores(self, targetLabels, actualLabels):\r\n if self.prMeasures is None:\r\n self.prMeasures = self.computePRMeasures(targetLabels, actualLabels)\r\n if self.prMeasures[0] == 0:\r\n return 0\r\n self.f1score = 2 * self.prMeasures[0] * self.prMeasures[1] / (0.0 + self.prMeasures[0] + self.prMeasures[1])\r\n return self.f1score",
"def phs(x, y, rbfParam) :\n return (x**2 + y**2) ** (rbfParam/2)",
"def _residual_edp(self, params):\n data = self.F**2\n model = np.absolute(self._model())**2\n sigma = self.sigma\n return (data[self.mask]-model[self.mask]) / sigma[self.mask] \n \n # The following three lines do not reproduce Sun's results, which proves\n # that the fits were done through intensity, not form factor.\n #data = self.F\n #model = np.absolute(self._model())\n #return (data - model) ",
"def calculate(self) -> float:",
"def p(party, vote_count, s):\n return t(party, vote_count) / d(s)",
"def getRatio(probe_num, position_vector, shot_range, dir, day ='050119r'):\n ratio_x = 0\n ratio_y = 0\n ratio_z = 0\n # helm_B = [0,0,0]\n divideby = 0\n for shot in range(shot_range[0], shot_range[1]+1):\n print( 'On shot ', day+str(shot), ' for probe ',probe_num)\n x,y,z, currmax,helmB_new = probe_calib(day+str(shot), probe_num, position_vector,dir)\n ratio_x = ratio_x + x\n ratio_y = ratio_y + y\n ratio_z = ratio_z + z\n # helm_B = [helm_B[i] + helmB_new[i] for i in len(helmB)]\n divideby = divideby + 1 #averaging over the number of shots\n ratio_Bx = ratio_x/divideby\n ratio_By = ratio_y/divideby\n ratio_Bz = ratio_z/divideby\n # helmB = [helm_B]/divideby\n # print ratio_Bx, ratio_By, ratio_Bz, helmB\n # print(\"ratio_Bx %f, ratio_By %f, ratio_Bz %f, helmB%s\"%(ratio_Bx, ratio_By, ratio_Bz, helmB))\n Bx_sqr =ratio_x**2\n By_sqr =ratio_y**2\n Bz_sqr =ratio_z**2\n B = Bx_sqr + By_sqr+ Bz_sqr\n norm_factor = np.sqrt(B)\n ratio_Bx, ratio_By, ratio_Bz = [ratio_Bx, ratio_By, ratio_Bz]/norm_factor\n\n return (ratio_Bx, ratio_By, ratio_Bz, norm_factor)",
"def BatageljBren_calc(TP, FP, FN, TN):\n try:\n return (FP * FN) / (TP * TN)\n except Exception:\n return \"None\"",
"def sharpe_ratio(factor_returns, annualization_factor):\r\n\r\n return annualization_factor * factor_returns.mean() / factor_returns.std()",
"def fs_ratio(self):\n return self._fs_ratio",
"def astrom_precision(fwhm, snr):\n result = fwhm/(snr)\n return result",
"def compute_portfolio_stats(allocs,prices,rfr=0, sf=252):\n\n # portfolio value\n port_val = compute_port_val(allocs, prices)\n\n daily_rets = port_val/port_val.shift(1) - 1\n daily_rets = daily_rets[1:]\n\n # cumulative return\n cr = port_val.iloc[-1]/port_val.iloc[0] -1\n\n # avg daily return\n adr = daily_rets.mean()\n\n # std dev of daily return\n sddr = daily_rets.std()\n \n #sharpe_ratio\n k = math.sqrt(252)\n \n sr = k * ((daily_rets - 0).mean() / daily_rets.std())\n \n return cr, adr, sddr, sr",
"def p2f(self):\n\n stale = self.m_f\n self.m_f = self.v.b / self.m_v",
"def stretch_factor(self):\n p = self._pants_decomposition\n\n # pick a curve to iterate\n c = PantsLamination.random(p)\n # print(c)\n\n cc = (self**100) * c\n # print(self**100)\n # print(cc)\n return float(sum(abs(x) for x in (self*cc).to_vector())) / \\\n sum(abs(x) for x in cc.to_vector())",
"def smdape(self) -> float:\n return float(np.median(2.0 * self._ae() / ((np.abs(self.true) + np.abs(self.predicted)) + EPS)))",
"def calculate_profit(self):",
"def adv_ratio(self): # XXX\r\n bw = StatsRouter.global_bw_mean\r\n if bw == 0.0: return 0\r\n else: return self.bw/bw",
"def f1(predictions, gold):\n if len(gold) == 0:\n return 1. if len(predictions) == 0 else 0.\n if len(predictions) == 0:\n return 0.\n predictions_set = set(predictions)\n gold_set = set(gold)\n nom = 2 * len(predictions_set.intersection(gold_set))\n denom = len(predictions_set) + len(gold_set)\n return float(nom)/float(denom)",
"def calculate_f_p(genes, gene_abundance_file, gene_molecular_weight_file):\n gene_abundance = pd.read_csv(gene_abundance_file, index_col=0)\n gene_molecular_weight = json_load(gene_molecular_weight_file)\n enzy_abundance = 0\n pro_abundance = 0\n for gene_i in gene_abundance.index:\n if gene_i in gene_molecular_weight.keys():\n abundance = gene_abundance.loc[gene_i, 'abundance'] * \\\n gene_molecular_weight[gene_i]/1000\n pro_abundance += abundance\n if gene_i in genes.index:\n enzy_abundance += abundance\n f = enzy_abundance/pro_abundance\n return f",
"def stump_S(z) :\n\n if z > 0:\n sz = sqrt(z) \n return (sz - sin(sz))/pow(sz,3)\n elif z < 0 :\n s_z = sqrt(-z) \n # According to the equation the denominatori is pow(sqrt(z),3)\n return (sinh(s_z) - s_z)/pow(s_z,3)\n else :\n return 0.1666666666666666",
"def silverman(n: int, ess: float) -> float:\n\n return (ess * (n + 2) / 4) ** (-1 / (n + 4))",
"def calculate_f(f, s = None, f_err = None, s_err = None, scale = 1000):\n if s is None:\n return f, f_err\n else:\n f0 = f * s / gamma(1./s)\n if (f_err is not None) and (s_err is not None):\n sigma = np.sqrt(f_err ** 2 + ((s + polygamma(0, 1/s))/s/gamma(1/s)* s_err)**2)\n else:\n sigma = None\n return f0, sigma",
"def measureSJF(p):\r\n\treturn measureFCFS(msort(p))",
"def strm_bw_ratio(self):\r\n bw = self.bwstats.mean\r\n if StatsRouter.global_strm_mean == 0.0: return 0\r\n else: return (1.0*bw)/StatsRouter.global_strm_mean",
"def getRPSA(ChargeSA):\n temp=0.0\n for i in ChargeSA:\n temp=temp+i[2]\n if temp == 0.0:\n return 0.0\n return getPSA(ChargeSA)/temp",
"def stats(detections, faces):\n vp, fp, fn, vn = 0, 0, 0, 0\n max_label = np.max(faces[:, 0])\n for i in range(max_label + 1):\n detections_i = get_label_with_index(detections, i)\n faces_i = get_label_with_index(faces, i)\n local_vp = 0\n for face in faces_i:\n found = False\n for detection in detections_i:\n if intersection_ratio(face, detection) >= 0.5:\n found = True\n break\n if found:\n vp += 1\n local_vp += 1\n else:\n fn += 1\n fp += len(detections_i) - local_vp\n\n precision = vp / (vp + fp)\n rappel = vp / (vp + fn)\n f_score = 2 * ((precision * rappel) / (precision + rappel))\n\n return precision, rappel, f_score",
"def std_ratio(self, **kwargs) -> float:\n return float(np.std(self.predicted, **kwargs) / np.std(self.true, **kwargs))",
"def shear_est(self, gal_image, psf_image, noise=None, F=False):\n # gal_ps = self.pow_spec(gal_image)\n gal_ps = gal_image\n # gal_ps = hk_tool_box.smooth(gal_ps,self.size)\n if noise is not None:\n nbg = self.pow_spec(noise)\n self.flux2 = numpy.sqrt(gal_ps[int(self.size/2), int(self.size/2)]/numpy.sum(self.rim*gal_ps)*numpy.sum(self.rim))\n # nbg = hk_tool_box.smooth(nbg,self.size)\n # rim = self.border(2, size)\n # n = numpy.sum(rim)\n # gal_pn = numpy.sum(gal_ps*rim)/n # the Possion noise of galaxy image\n # nbg_pn = numpy.sum(nbg*rim)/n # the Possion noise of background noise image\n gal_ps = gal_ps - nbg# + nbg_pn - gal_pn\n\n if F:\n psf_ps = psf_image\n else:\n psf_ps = self.pow_spec(psf_image)\n # self.get_radius_new(psf_ps, 2)\n wb, beta = self.wbeta(self.hlr)\n maxi = numpy.max(psf_ps)\n idx = psf_ps < maxi / 100000.\n wb[idx] = 0\n psf_ps[idx] = 1.\n tk = wb/psf_ps * gal_ps\n\n # ky, kx = self.ky, self.kx\n # #\n # kx2 = kx*kx\n # ky2 = ky*ky\n # kxy = kx*ky\n # k2 = kx2 + ky2\n # k4 = k2*k2\n # mn1 = (-0.5)*(kx2 - ky2) # (-0.5)*(kx**2 - ky**2)\n # mn2 = -kxy # -kx*ky\n # mn3 = k2 - 0.5*beta**2*k4 # kx**2 + ky**2 - 0.5*beta**2*(kx**2 + ky**2)**2\n # mn4 = k4 - 8*kx2*ky2 # kx**4 - 6*kx**2*ky**2 + ky**4\n # mn5 = kxy*(kx2 - ky2) # kx**3*ky - kx*ky**3\n\n # mn1 = self.mn1\n # mn2 = self.mn2\n mn3 = self.k2 - 0.5*beta**2*self.k4\n # mn4 = self.mn4\n # mn5 = self.mn5\n\n mg1 = numpy.sum(self.mn1 * tk)*self.alpha\n mg2 = numpy.sum(self.mn2 * tk)*self.alpha\n mn = numpy.sum(mn3 * tk)*self.alpha\n mu = numpy.sum(self.mn4 * tk)*(-0.5*beta**2)*self.alpha\n mv = numpy.sum(self.mn5 * tk)*(-2.*beta**2)*self.alpha\n\n return mg1, mg2, mn, mu, mv",
"def sharpe_ratio(self,r, riskfree_rate, periods_per_year):\n # Convert the annualized riskfree rate to period \n \n rf_per_period = (1+riskfree_rate) ** (1/periods_per_year) - 1\n \n excess_ret = r - rf_per_period\n \n ann_ex_ret = self.annualize_rets(excess_ret, periods_per_year)\n \n ann_vol = self.annualize_vol(r, periods_per_year)\n\n return ann_ex_ret / ann_vol",
"def getFPSA3(ChargeSA):\n temp=0.0\n for i in ChargeSA:\n temp=temp+i[2]\n if temp == 0.0:\n return 0.0\n else:\n return getPPSA3(ChargeSA)/temp",
"def rmdspe(self) -> float:\n return float(np.sqrt(np.median(np.square(self._percentage_error()))) * 100.0)",
"def ratio_local_prod(self):\n if self.current_energy_produced == 0.0:\n return 1.0\n else:\n return 1. - self.export_grid / self.current_energy_produced",
"def _comput_PSNR(self, input, target):\n shave = 4\n ch, h, w = input.size()\n input_Y = rgb2ycbcrT(input.cpu())\n target_Y = rgb2ycbcrT(target.cpu())\n diff = (input_Y - target_Y).view(1, h, w)\n\n diff = diff[:, shave:(h - shave), shave:(w - shave)]\n mse = diff.pow(2).mean()\n psnr = -10 * np.log10(mse)\n return psnr",
"def points_per_dollar(self):\n if float(self.draftkings_salary) == 0.0:\n return 0.0\n\n return float(self.predicted_draftkings_points) / float(self.draftkings_salary)",
"def golden_ratio():\n return 1.61803398875",
"def fr2sp(fr):\n return (math.sqrt(2)/(2*math.pi*fr))",
"def _calcFs(pop_indivs, pop_counts):\n pop_names = pop_counts.keys()\n r = len(pop_names)\n n_i = []\n for pop_name in pop_names:\n n_i.append(len(pop_indivs[pop_name]))\n n = reduce(lambda x, y: x+y, n_i)\n n_bar = 1.0 * n / r\n n_c = n\n for ni in n_i:\n n_c -= 1.0*(ni**2)/n\n n_c = n_c / (r-1)\n\n alleles = _get_all_alleles(pop_counts)\n a = 0.0\n b = 0.0\n c = 0.0\n for allele in alleles:\n p_i = []\n for pop_name in pop_names:\n p_i.append(_get_allele_freq(pop_counts[pop_name], allele))\n p_bar = 0.0\n for i in range(len(p_i)):\n p_bar += n_i[i] * p_i[i]\n p_bar = 1.0 * p_bar / n\n s_2 = 0.0\n for i in range(len(p_i)):\n s_2 += n_i[i] * (p_i[i] - p_bar) * (p_i[i] - p_bar)\n h_bar = 0.0\n for i in range(len(p_i)):\n h_bar += _get_het_allele_freq(pop_indivs[pop_name], allele) *n_i[i]\n h_bar = 1.0 * h_bar / n\n a += n_bar / n_c * (s_2 - (p_bar * (1-p_bar) - (r - 1.0) / r * s_2 - h_bar / 4.0) / (n_bar - 1.0) )\n b += n_bar / (n_bar - 1) * (p_bar * (1-p_bar) - (r - 1.0) / r * s_2 - (2 * n_bar - 1) / (4.0 * n_bar) * h_bar )\n c += h_bar / 2.0\n if a + b + c == 0:\n fst = 0.0\n else:\n fst = a / (a + b + c)\n if a + b + c == 0:\n fit = 1.0\n else:\n fit = (1.0 - c) / (a + b + c)\n if b + c == 0:\n fis = 1.0\n else:\n fis = (1.0 - c) / (b + c)\n\n return fst, fit, fis",
"def hss(self):\n return 2 * (self.table[0, 0] * self.table[1, 1] - self.table[0, 1] * self.table[1, 0]) / (\n (self.table[0, 0] + self.table[0, 1]) * (self.table[0, 1] + self.table[1, 1]) +\n (self.table[0, 0] + self.table[1, 0]) * (self.table[1, 0] + self.table[1, 1]))",
"def _calc_r2(self):\n sse = np.sum((self.data.y - self.predict(self.data.x))**2)\n sst = np.sum((self.data.y - self.data.y.mean())**2)\n return (1. - sse/sst)",
"def calc_NPSH(P_suction, P_vapor, rho_liq):\n # Note: NPSH = (P_suction - P_vapor)/(rho_liq*gravity)\n # Taking into account units, NPSH will be equal to return value\n return 0.334438*(P_suction - P_vapor)/rho_liq",
"def _one_sided_p_value(t, df):\n return scipy.stats.t.sf(t, df=df)",
"def calculate_ft(self):\n \n # Create a function which is able to evaluate B**2\n ffunc = scipy.interpolate.interp1d(self.psigrid, self.e.getF()[self.tind])\n def b2_func(R, Z, psi):\n bt = ffunc(psi)/R\n br = -self.psifunc.ev(R, Z, dy=1)/R\n bz = self.psifunc.ev(R, Z, dx=1)/R\n \n return bt**2 + br**2 + bz**2\n \n\n def b_bmax2(R,Z,psi):\n b2 = b2_func(R,Z,psi)\n return b2 / np.max(b2)\n \n def b_bmax(R,Z,psi):\n return np.sqrt(b_bmax2(R,Z,psi))\n \n # Evaluate the flux-surface averaged h^2 and h, as required\n fsa_h2 = self.fs_average(b_bmax2)\n fsa_h = self.fs_average(b_bmax)\n \n # This is the function which gets flux-surface averaged in equation (7)\n def ftl_func(R,Z,psi):\n h = b_bmax(R,Z,psi)\n h2 = b_bmax2(R,Z,psi)\n \n return (1 - (np.sqrt(1 - h) * (1 + 0.5 * h)))/h2\n \n \n # Equation 6, 7 in Lin-Liu\n fs_ftu = 1 - fsa_h2 / fsa_h**2 * (1 - np.sqrt(1 - fsa_h) * (1 + 0.5 * fsa_h))\n fs_ftl = 1 - fsa_h2 * self.fs_average(ftl_func)\n # Equation 18, 19 \n om = 0.75\n self.fs_ft = om*fs_ftu + (1-om)*fs_ftl",
"def getFNSA1(ChargeSA):\n temp = 0.0\n for i in ChargeSA:\n temp = temp + i[2]\n if temp == 0.0:\n return 0.0\n return getPNSA1(ChargeSA)/temp",
"def gsrfp(self, gp, lai):\n\t return (lai*self.gtf()*gp/self.F_CAP)/(self.gtf() + lai*gp/self.F_CAP)",
"def get_S_r(self):\n\n S_r = np.sum((self.eta_model - self.eta_exp) ** 2.)\n\n return S_r",
"def dishlist_avg_cal(n:list)->float:\r\n all_cal = dishlist_cal(n)\r\n return sum(all_cal)/len(all_cal)",
"def vwsp(self):\n trades = Trade.get_instance().get_trades_for_symbol(self.symbol)\n try:\n return sum(tr.price * tr.quantity for tr in trades) / sum(tr.quantity for tr in trades)\n except ZeroDivisionError:\n return 0.0",
"def get_score(fps: float, ssim: float,\n ssim_max: float = 30.,\n rate: float = 10.) -> float:\n fps = np.round(fps, 1)\n ssim = np.round(ssim, 3)\n return 0.1 * np.log(fps) / np.log(rate) + min(ssim, ssim_max)",
"def calculate_fwhm(self, surface, xy_data, PSF_window, N_points, spaxel_scale, wavelength, mode='diffraction'):\n\n start = time()\n # Calculate the Geometric PSF\n x, y = xy_data[:, 0], xy_data[:, 1]\n cent_x, cent_y = np.mean(x), np.mean(y)\n\n # Estimate the Geometric PSF using Kernel Density Estimation. The XY raytrace results are random samples\n # drawn from a probability distribution, the Geometric PSF. KDE estimates that distribution.\n # The main parameter of interest is the 'bandwidth' which defines the width of the kernel that KDE uses to\n # estimate the distribution. A narrower kernel will give a GeoPSF with finer structure; too wide a kernel will\n # just wash away the structure. We found that a bandwidth equal to the standard deviation of the raytrace data\n # works well\n std_x, std_y = np.std(x), np.std(y)\n bandwidth = min(std_x, std_y)\n kde = KernelDensity(kernel='gaussian', bandwidth=1.0*bandwidth).fit(xy_data)\n\n # define a grid to compute the PSF\n xmin, xmax = cent_x - PSF_window/2/1000, cent_x + PSF_window/2/1000\n ymin, ymax = cent_y - PSF_window/2/1000, cent_y + PSF_window/2/1000\n x_grid = np.linspace(xmin, xmax, N_points)\n y_grid = np.linspace(ymin, ymax, N_points)\n xx_grid, yy_grid = np.meshgrid(x_grid, y_grid)\n xy_grid = np.vstack([xx_grid.ravel(), yy_grid.ravel()]).T\n log_scores = kde.score_samples(xy_grid)\n\n psf_geo = np.exp(log_scores)\n psf_geo /= np.max(psf_geo)\n psf_geo = psf_geo.reshape(xx_grid.shape)\n\n time_geopsf = time() - start\n # print(\"Time to estimate GeoPSF: %.3f sec\" % time_geo)\n\n if mode == \"diffraction\":\n start = time()\n\n psf_diffr = diffraction.add_diffraction(surface=surface, psf_geo=psf_geo, PSF_window=PSF_window,\n scale_mas=spaxel_scale, wavelength=wavelength)\n time_diffpsf = time() - start\n # print(\"Time to add Diffraction: %.3f sec\" % time_diffpsf)\n\n # Fit the PSF to a 2D Gaussian\n start = time()\n guess_x = PSF_window / 2 / 1000\n fwhm_x, fwhm_y = diffraction.fit_psf_to_gaussian(xx=xx_grid, yy=yy_grid, psf_data=psf_diffr,\n x0=cent_x, y0=cent_y, sigmax0=guess_x, sigmay0=guess_x)\n psf_result = psf_diffr\n\n elif mode == \"geometric\":\n\n start = time()\n guess_x = PSF_window / 2 / 1000\n fwhm_x, fwhm_y = diffraction.fit_psf_to_gaussian(xx=xx_grid, yy=yy_grid, psf_data=psf_geo,\n x0=cent_x, y0=cent_y, sigmax0=guess_x, sigmay0=guess_x)\n psf_result = psf_geo\n\n # fig, (ax1, ax2) = plt.subplots(1, 2)\n # img1 = ax1.imshow(psf_geo, extent=[xmin, xmax, ymin, ymax], cmap='plasma', origin='lower')\n # ax1.scatter(x, y, s=1, color='white', alpha=0.5)\n # plt.colorbar(img1, ax=ax1, orientation='horizontal')\n # ax1.set_xlabel(r'X [mm]')\n # ax1.set_ylabel(r'Y [mm]')\n # ax1.set_title(r'Geometric PSF estimate | Surface: %s' % surface)\n #\n # ax2.plot(x_grid, psf_geo[N_points // 2])\n # xbins, bins, p = ax2.hist(x, bins=np.linspace(xmin, xmax, N_points), density=True)\n # for item in p:\n # item.set_height(item.get_height() / np.max(xbins))\n # ax2.set_ylim([0, 1])\n # plt.show()\n\n time_gauss = time() - start\n\n # print('FWHM time: %.3f sec for GeoPSF estimate:' % time_geopsf)\n # print('FWHM time: %.3f sec for DiffPSF convolution:' % time_diffpsf)\n # print('FWHM time: %.3f sec for Gaussian fit:' % time_gauss)\n\n #\n # img2 = ax2.imshow(psf_diffr, extent=[xmin, xmax, ymin, ymax], cmap='plasma', origin='lower')\n # plt.colorbar(img2, ax=ax2, orientation='horizontal')\n # ax2.set_xlabel(r'X [mm]')\n # ax2.set_ylabel(r'Y [mm]')\n # if surface == 'DET':\n # ax2.set_title(r'Diffr. 
PSF | %.3f microns | %.1f mas | FWHM_x: %.1f $\\mu$m' % (wavelength, spaxel_scale, fwhm_x))\n # elif surface == 'IS':\n # ax2.set_title(r'Diffr. PSF | %.3f microns | %.1f mas | FWHM_y: %.1f $\\mu$m' % (wavelength, spaxel_scale, fwhm_y))\n\n return fwhm_x, fwhm_y, psf_result",
"def solar_ppa():\n per_kwh = 0.196 # [$/kWh]\n\n return per_kwh",
"def snr_f(self, image):\n image_ps = self.pow_spec(image)\n noise_level = numpy.sum(self.rim*image_ps)/numpy.sum(self.rim)\n return numpy.sqrt(image_ps[int(self.size/2), int(self.size/2)]/noise_level)",
"def smape(self) -> float:\n _temp = np.sum(2 * np.abs(self.predicted - self.true) / (np.abs(self.true) + np.abs(self.predicted)))\n return float(100 / len(self.true) * _temp)",
"def calc_skylevel(self, skylevstdp = DEFAULT_SKYLEVELSTD):\n if self.data is None or self.skylevstd == skylevstdp:\n return\n fimagedata = self.data.flatten()\n skymask = fimagedata - self.meanval <= skylevstdp * self.stdval\n fimagedata = fimagedata[skymask]\n if len(fimagedata) < 100:\n raise RemFitsErr(\"No possible sky in file\")\n self.skylev = fimagedata.mean()\n self.skystd = fimagedata.std()\n self.skylevstd = skylevstdp",
"def compute_portfolio_stats(normed_vals, rfr = 0, sf = 252):\n\tportfolio_vals = normed_vals.sum(axis=1)\n\tdaily_returns = (portfolio_vals[1:] / portfolio_vals[:-1].values) - 1\n\n\tcr = (portfolio_vals.iloc[-1]/portfolio_vals.ix[0]) - 1\n\tadr = daily_returns.mean()\n\tsddr = abs(np.std(daily_returns, ddof=1))\n\tsr = np.sqrt(252) * ((daily_returns).mean()/daily_returns.std(ddof=1))\n\t\n\treturn cr, adr, sddr, sr",
"def getFNSA2(ChargeSA):\n temp = 0.0\n for i in ChargeSA:\n temp = temp + i[2]\n if temp == 0.0:\n return 0.0\n return getPNSA2(ChargeSA)/temp",
"def SNRcalc(self, pulsar, pop):\n # if not in region, S/N = 0\n\n # if we have a list of pointings, use this bit of code\n # haven't tested yet, but presumably a lot slower\n # (loops over the list of pointings....)\n \n\n # otherwise check if pulsar is in entire region\n if self.inRegion(pulsar):\n # If pointing list is provided, check how close nearest \n # pointing is\n if self.pointingslist is not None:\n # convert offset from degree to arcmin\n offset = self.inPointing(pulsar) * 60.0\n\n else:\n # calculate offset as a random offset within FWHM/2\n offset = self.fwhm * math.sqrt(random.random()) / 2.0\n else:\n return -2\n\n # Get degfac depending on self.gainpat\n if self.gainpat == 'airy':\n conv = math.pi/(60*180.) # Conversion arcmins -> radians\n eff_diam = 3.0e8/(self.freq*self.fwhm*conv*1.0e6) # Also MHz -> Hz\n a = eff_diam/2. # Effective radius of telescope\n lamda = 3.0e8/(self.freq*1.0e6) # Obs. wavelength\n kasin = (2*math.pi*a/lamda)*np.sin(offset*conv)\n degfac = 4*(j1(kasin)/kasin)**2\n else:\n #### NOTE! HERE I WANT TO CHECK UNITS OF FWHM (ARCMIN???)\n degfac = math.exp(-2.7726 * offset * offset / (self.fwhm *self.fwhm))\n\n # Dunc's code here uses a ^-2.6 to convert frequencies\n # don't think I need to do this - I'm using the frequency in call\n Ttot = self.tsys + self.tskypy(pulsar)\n\n # calc dispersion smearing across single channel\n tdm = self._dmsmear(pulsar)\n\n # calculate bhat et al scattering time (inherited from GalacticOps)\n # in units of ms\n tscat = go.scatter_bhat(pulsar.dm, pulsar.scindex, self.freq)\n\n # Calculate the effective width\n weff_ms = math.sqrt(pulsar.width_ms()**2 + self.tsamp**2 + tdm**2 + tscat**2)\n\n # calculate duty cycle (period is in ms)\n delt = weff_ms / pulsar.period\n #print weff_ms, pulsar.period\n\n # if pulse is smeared out, return -1.0\n if delt > 1.0:\n #print weff_ms, tscat, pulsar.dm, pulsar.gl, pulsar.gb, pulsar.dtrue\n return -1\n else:\n return self._SNfac(pulsar, pop.ref_freq, degfac, Ttot) \\\n * math.sqrt((1.0 -delt)/delt)",
"def ForbesII_calc(TP, FP, FN, TN):\n try:\n n = TP + FP + FN + TN\n part1 = (FP * FN) - (TP * TN)\n part2 = (TP + FP) * (TP + FN)\n part3 = min((TP + FP), (TP + FN))\n return part1 / (part2 - (n * part3))\n except Exception:\n return \"None\"",
"def pe_ratio(self):\n if self._pe_ratio == None:\n return float('inf')\n return self._pe_ratio",
"def _psnr(img1, img2):\n mse = np.mean((img1 - img2) ** 2)\n if mse == 0:\n return 100\n PIXEL_MAX = 1\n return (20 * math.log10(PIXEL_MAX)) - (10 * math.log10(mse))",
"def PV(rate, nper, pmt, fv):\n if type(pmt) == int:\n pmt = np.array([pmt])\n else:\n pmt = np.array(pmt)\n if nper <= 0:\n print(\"nper needs to be greater than zero.\")\n elif nper != len(pmt) and sum(pmt) != 0:\n print(\"pmt vector length needs to match nper or be zero.\")\n else:\n pv_fv = fv / (1 + rate) ** nper\n fv_pmt = [(pmt[i - 1] / (1 + rate) ** i) for i in np.arange(1, len(pmt) + 1, 1)]\n return(sum(fv_pmt) + pv_fv)"
] | [
"0.6681911",
"0.6620132",
"0.65377945",
"0.6458727",
"0.6316942",
"0.6316942",
"0.62935734",
"0.6280627",
"0.62713075",
"0.6183255",
"0.6126164",
"0.6119334",
"0.61046535",
"0.6037856",
"0.6029699",
"0.60194486",
"0.6016596",
"0.60025454",
"0.5961723",
"0.5926681",
"0.5922735",
"0.5916719",
"0.59162444",
"0.59148926",
"0.5900624",
"0.58916336",
"0.5871867",
"0.58499825",
"0.5849679",
"0.5844318",
"0.5833245",
"0.5829697",
"0.58272177",
"0.58158374",
"0.5813314",
"0.58124983",
"0.5807206",
"0.57977194",
"0.5788642",
"0.5782318",
"0.5770329",
"0.5756776",
"0.57561177",
"0.57545215",
"0.57432115",
"0.572279",
"0.5718729",
"0.57133716",
"0.5710346",
"0.5709085",
"0.56999195",
"0.5699714",
"0.5683651",
"0.5681334",
"0.5676755",
"0.5674547",
"0.5673063",
"0.5669985",
"0.5644121",
"0.5642807",
"0.56341887",
"0.5633989",
"0.5630187",
"0.56265557",
"0.56258124",
"0.5621662",
"0.5609448",
"0.56033593",
"0.5593302",
"0.5593224",
"0.55898464",
"0.5587741",
"0.55790424",
"0.5575726",
"0.55751574",
"0.55723614",
"0.5566941",
"0.55551136",
"0.5553007",
"0.5551552",
"0.55484545",
"0.5545066",
"0.5540296",
"0.5536556",
"0.5532853",
"0.55324966",
"0.55321664",
"0.5523044",
"0.55215204",
"0.5517123",
"0.5511073",
"0.55105686",
"0.5505001",
"0.5502375",
"0.54939806",
"0.54936635",
"0.5491602",
"0.54904574",
"0.5489881",
"0.54888064",
"0.5487761"
] | 0.0 | -1 |
Plot an image of the PSF | def plot_PSF(self, coef, wave_idx):
PSF, strehl = self.compute_PSF(coef, wave_idx)
plt.figure()
plt.imshow(PSF)
plt.title('Strehl: %.3f' %strehl)
plt.colorbar()
plt.clim(vmin=0, vmax=1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def plot_prodata_psf(self,font_size=28,img_name='prodata_psf.pdf',img_id=0):\n rawimage = self.raw_image\n dataimage = self.data\n len_mask = self.lens_mask\n plu_mask_out = self.plu_mask\n\n fig, (ax1, ax2, ax3, ax4,ax5) = plt.subplots(1, 5, figsize=(19, 10))\n ax1.imshow((rawimage), origin='lower', cmap=\"gist_heat\")\n ax1.set_title('Original Image', fontsize=font_size)\n ax1.text(rawimage.shape[0] * 0.55, rawimage.shape[0] * 0.8, 'ID='+repr(img_id), size=12, color='white',\n weight=\"bold\")\n ax1.text(rawimage.shape[0] * 0.2, rawimage.shape[0] * 0.05, 'observation', size=20, color='white', weight=\"bold\")\n ax1.axis('off')\n #\n ax2.imshow((dataimage), origin='lower', cmap=\"gist_heat\")\n ax2.set_title('Image Data', fontsize=font_size)\n ax2.text(dataimage.shape[0] * 0.2, dataimage.shape[0] * 0.05, 'image data', size=20, color='white', weight=\"bold\")\n ax2.axis('off')\n #\n ax3.imshow(len_mask, origin='lower')\n ax3.set_title('Lens light', fontsize=font_size)\n ax3.axis('off')\n #\n ax4.imshow(plu_mask_out, origin='lower')\n ax4.set_title('Mask', fontsize=font_size)\n ax4.axis('off')\n#\n psf=self.psf\n ax5.imshow(np.log10(psf), origin='lower', cmap=\"gist_heat\")\n ax5.set_title('lg(PSF)', fontsize=font_size)\n ax5.axis('off')\n\n plt.show()\n fig.savefig(img_name)\n return 0",
"def plot_fppy(self,LAXIS,xbl,xbr,ybu,ybd,ilg): \n\t\t\n # load x GRID\n grd1 = self.xzn0\n\t\n # load DATA to plot\n plt1 = self.fppy\n\t\t\t\t\n # create FIGURE\n plt.figure(figsize=(7,6))\n\t\t\n # format AXIS, make sure it is exponential\n plt.gca().yaxis.get_major_formatter().set_powerlimits((0,0))\t\t\n\n # set plot boundaries \n to_plot = [plt1]\t\t\n self.set_plt_axis(LAXIS,xbl,xbr,ybu,ybd,to_plot)\n\t\t\t\t\n # plot DATA \n plt.title(r'pressure flux y')\n plt.plot(grd1,plt1,color='brown',label = r'f$_{py}$')\n\n # define and show x/y LABELS\n setxlabel = r\"r (cm)\"\n setylabel = r\"$f_{py}$ (erg cm$^{-2}$ s$^{-1}$)\"\n plt.xlabel(setxlabel)\n plt.ylabel(setylabel)\n\t\t\n # show LEGEND\n plt.legend(loc=ilg,prop={'size':18})\n\n # display PLOT\n plt.show(block=False)\n\n # save PLOT\n plt.savefig('RESULTS/'+self.data_prefix+'mean_fppy.png')",
"def plot_pz(pz_inst):\n plt.figure()\n \n plt.plot(pz_inst.z_data, pz_inst.p_data)\n \n plt.savefig(\"pz_figure.png\")\n plt.close()",
"def show(image):\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def plot(self,id=1,dpi=150):\n fig = plt.figure(id)\n ax1 = fig.add_subplot(111)\n ax1.imshow(self.image,interpolation='nearest',extent=[self.xmin,self.xmax,\n self.ymin,self.ymax], origin='lower')\n #plt.savefig('.png',dpi=dpi)\n plt.draw()",
"def plotSate(s,i,seed):\r\n fig, ax = plt.subplots()\r\n\r\n im = ax.imshow(s)\r\n\r\n plt.xticks([i for i in range(dim)], \"\")\r\n plt.yticks([i for i in range(dim)], \"\")\r\n\r\n fig.tight_layout()\r\n plt.savefig(\"Systems/\" + str(dim) + \"_\" + str(seed) + \"/Images/\" + str(i) +\r\n \".jpeg\",quality=80,optimize=True,\r\n dpi=80,progressive=True,transparent=True)\r\n fig.clear()\r\n plt.close(fig)",
"def show(image):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def show(image):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def show(image):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def plot(self):\n\t\tself.plotOfTF().plot()",
"def pf_plot(pf, t):\n xx = pf.XS[t, :, 0]\n yy = pf.XS[t, :, 1]\n ww = pf.WS[t, :]\n plt.scatter(xx, yy, s=ww * 5000)",
"def show(image):\n fig = pyplot.figure()\n axis = fig.add_subplot(1, 1, 1)\n imgplot = axis.imshow(image)\n imgplot.set_interpolation('nearest')\n axis.xaxis.set_ticks_position('top')\n axis.yaxis.set_ticks_position('left')\n pyplot.show()",
"def psf_plot(self, irfname=None, outfile='psf.csv', title=''):\n psf = self.get_psf(irfname)\n \n def bkg_size(e, ct):\n f2 = lambda delta: psf(e,ct, delta)**2 * 2*np.pi*delta\n return np.degrees(1./np.sqrt(np.pi*integrate.quad(f2, 0, np.inf)[0]))\n \n def loc_size(e, ct):\n func = lambda x : psf(e,ct, x)\n fprime = lambda x : misc.derivative(func, x, dx=0.0001, order=5)\n integrand = lambda rp : rp * fprime(rp)**2/func(rp) * np.pi\n return np.degrees(1/np.sqrt(integrate.quad(integrand, 0, np.radians(5))[0]))\n \n \n egev = np.logspace(-1.+1/8., 2.5+1/8., 3.5*4+1)\n front, back = [[bkg_size(e*1e3,ct) for e in egev] for ct in range(2)]\n floc, bloc = [[loc_size(e*1e3,ct) for e in egev] for ct in range(2)]\n f68,b68 = [[psf.inverse_integral(e*1e3, ct) for e in egev] for ct in range(2)]\n fig,ax = plt.subplots(figsize=(6,6))\n for x, s, label in zip((front, back, floc, bloc, f68, b68),\n ('-g', 'r', '--g', '--r', ':g', ':r'),\n ('front bkg', 'back bkg','front loc', 'back loc', 'front 68', 'back 68')):\n ax.plot(egev, x, s, lw=2, label=label)\n \n plt.setp(ax, xlabel='Energy (GeV)', ylabel='PSF size (deg)', xscale='log', yscale='log',\n xlim=(0.1, 100), ylim=(0.02, 8), title=title)\n ax.legend(prop=dict(size=10)); ax.grid()\n #x.set_xticklabels('0.1 1 10 100'.split())\n #ax.set_yticklabels('0.01 0.1 1'.split())\n if outfile is None: return fig\n self.psf_df = pd.DataFrame(dict(front=front, floc=floc, back=back, bloc=bloc,f68=f68,b68=b68), \n index=egev.round(3))\n self.psf_df.index.name='energy'\n self.psf_df.to_csv(os.path.join(self.plotfolder, outfile))\n print ('wrote file %s' % os.path.join(self.plotfolder, outfile))\n return fig",
"def show_image(path):\n img = mpimg.imread(path)\n imgplot = plt.imshow(img)\n plt.show()\n plt.close()",
"def Plot(self):\n\n ### Create the path names ###\n folder_string = self.params.folder+\"/plots/\"\n u_string = self.params.folder+\"/plots/u.pdf\"\n p_string = self.params.folder+\"/plots/p.pdf\"\n\n ### Check if folder exists ###\n if not os.path.exists(folder_string): os.makedirs(folder_string)\n\n ### Plot the x component of velocity ###\n plot(self.u_next[0],title=\"Velocity in the x Direction\")\n plt.savefig(u_string)\n plt.figure()\n\n ### Plot the pressure ###\n plot(self.p_next,title=\"Pressure\")\n plt.savefig(p_string)\n plt.show()",
"def test_random_single_image():\n\n shap.image_plot(np.random.randn(3, 20, 20), np.random.randn(3, 20, 20), show=False)",
"def display_image(X):\n\n\tim = X.reshape(28, 28)\n\ttemp = plt.imshow(im)\n\tplt.show()",
"def show(image,label,pred):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n s=\"True Label : \"+str(label)+\" Predicted label : \"+str(pred)\n pyplot.xlabel(s,fontname=\"Arial\", fontsize=20 )\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def _init_plot(self) -> None:\n\n # create a grayscale plot\n out = sys.stdout\n sys.stdout = open(\"/dev/null\", \"w\")\n hdu = self.image_generator.image(self.ra, self.dec)\n self.plot = aplpy.FITSFigure(hdu)\n self.plot.show_grayscale()\n self.plot.set_theme(\"publication\")\n sys.stdout = out\n\n # label for the position angle\n pa_string = \"PA = %.1f\" % self.mode_details.position_angle().to_value(u.deg)\n if self.mode_details.automated_position_angle():\n pa_string += \" (auto)\"\n self.draw_label(0.95, -0.05, pa_string, style=\"italic\", weight=\"bold\")\n\n # label for the title\n if self.title:\n self.draw_label(\n 0.5, 1.03, self.title, style=\"italic\", weight=\"bold\", size=\"large\"\n )\n\n # label for the image source\n self.draw_label(\n -0.05,\n -0.05,\n \"%s\" % self.image_generator.source(),\n style=\"italic\",\n weight=\"bold\",\n )\n\n # grid overlay\n self.plot.add_grid()\n self.plot.grid.set_alpha(0.2)\n self.plot.grid.set_color(\"b\")\n\n # indicate the RSS field of view\n self.draw_circle(self.ra, self.dec, 4.0 * u.arcmin, \"g\")\n self.draw_label(\n 0.79,\n 0.79,\n \"RSS\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"left\",\n color=(0, 0, 1),\n )\n\n # indicate the Salticam field of view\n self.draw_circle(self.ra, self.dec, 5.0 * u.arcmin, \"g\")\n self.draw_label(\n 0.86,\n 0.86,\n \"SCAM\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"left\",\n color=(0, 0, 1),\n )\n\n # labels for north and east direction\n self.draw_label(\n self.ra,\n self.dec + 4.8 * u.arcmin,\n \"N\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n color=(0, 0.5, 1),\n )\n self.draw_label(\n self.ra + 4.8 * u.arcmin / np.abs(np.cos(self.dec)),\n self.dec,\n \"E\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"right\",\n color=(0, 0.5, 1),\n )\n\n # add cross hairs\n self.draw_centered_line(\n 0 * u.deg,\n 8 * u.arcmin,\n self.ra,\n self.dec,\n color=\"g\",\n linewidth=0.5,\n alpha=1.0,\n )\n self.draw_centered_line(\n 90 * u.deg,\n 8 * u.arcmin,\n self.ra,\n self.dec,\n color=\"g\",\n linewidth=0.5,\n alpha=1.0,\n )\n\n # label for the magnitude range and bandpass\n if self.magnitude_range:\n self._show_magnitudes()\n\n # add mode specific content\n if not self.basic_annotations:\n self.mode_details.annotate_finder_chart(self)",
"def imshow(img):\n imadd(img)\n plt.ion()\n plt.show()",
"def show_image(image):\r\n plt.imshow(image, cmap='gray')\r\n plt.show()",
"def plot(self):\n self.fig = plt.figure('black hole')\n self.fig.clf() #clear the graph to avoir superposing data from the same set (can be deactivated if need to superpose)\n self.ax = plt.subplot()\n\n if self.img2 is not None:\n self.ax.imshow(self.img2)\n else:\n print(\"No black hole deformation in the memory, displayed the original image instead.\")\n self.ax.imshow(self.img_debut)\n\n self.fig.canvas.set_window_title('Black hole')\n self.ax.set_title(\"scrool to zoom in or out \\nright click to add an offset in the background \\nleft click to refresh image \\n close the option windows to stop the program\")\n self.fig.canvas.mpl_connect('scroll_event', self.onscroll)\n self.fig.canvas.mpl_connect('button_press_event', self.onclick)\n self.fig.canvas.mpl_connect('axes_leave_event', self.disconnect)\n self.fig.canvas.mpl_connect('axes_enter_event', self.connect)\n\n self.draw()",
"def plot(self):\n\t\tself.plotOfSpect().plot()",
"def show_flow(filename):\n flow = read_flow(filename)\n img = flow_to_image(flow)\n plt.imshow(img)\n plt.show()",
"def draw_image(self):\n \n pixel_array = self.imageprepare(self.image_path)\n newArr = self.reshape_pixel_array(pixel_array)\n plt.imshow(newArr, interpolation='nearest')\n plt.savefig('MNIST_IMAGE.png')#save MNIST image\n plt.show()#Show / plot that image",
"def plot_fr_and_spikes(self, t):\n plt.figure(figsize=(10, 8))\n\n plt.subplot(2, 2, 1)\n self.plot_base_image()\n\n plt.subplot(2, 2, 2)\n self.plot_firing_rates(t, mode='ON')\n plt.title('Retinal Image')\n\n # Spikes\n ax = plt.subplot(2, 2, 3)\n self.plot_spikes(ax, t, mode='ON', moving_average=True)\n\n ax = plt.subplot(2, 2, 4)\n self.plot_spikes(ax, t, mode='OFF', moving_average=True)",
"def plot(self):\n\t\tself.plotOfSpect()",
"def plot_sample(x):\n plt.imshow(x[:,:,0])\n plt.title(\"gasf\")\n plt.colorbar()\n plt.show()\n\n plt.imshow(x[:,:,1])\n plt.title(\"gadf\")\n plt.colorbar()\n plt.show()\n\n plt.imshow(x[:,:,2])\n plt.title(\"mtf\")\n plt.colorbar()\n plt.show()",
"def show_plot(img, title):\n plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n plt.title(\"Hand Number: \" + title)\n plt.show()",
"def plot_image(img, label=\"\"): \n if img.shape[0] == 3:\n img = img.transpose(1,2,0)\n fig,ax = plt.subplots(1)\n sns.set_style('white')\n ax.imshow(np.asarray(img))\n if label!=\"\":\n plt.title(number_label[label])\n return fig,ax",
"def plot(self, param):\n # process param to be lowercase and without spaces\n param = str(param).lower().replace(\" \", \"\")\n plt.ion() # enable interactive mode\n\n # making plot according to param\n if param == \"illuminantenergy\": # energy of illuminant\n self.illuminant.plot(\"energy\")\n elif param == \"illuminantphotons\": # photons of illuminant\n self.illuminant.plot(\"photons\")\n elif param == \"srgb\": # srgb image of the scene\n plt.imshow(self.srgb)\n plt.show()\n else:\n raise(ValueError, \"Unknown parameter\")",
"def make_image(self, frame, filename, **kwds):\n p = plot.plot(frame, **kwds)\n p.save_image(filename)",
"def plot_img():\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1,28,28).data.numpy()[0,].squeeze())\n\n plt.show()\n plt.pause(1e-6)\n plt.gcf().clear()\n sample = model.sample_z(data) \n plt.imshow(sample)",
"def show_plot() :\n logger.info(\"Show plot\")\n pylab.axis('equal')\n pylab.xlabel(\"Longitud\")\n pylab.ylabel(\"Latitud\")\n pylab.grid(True)\n pylab.title(\"Product tiles and product source\")\n pylab.show()",
"def visualizeImg(img):\n plt.figure(figsize=(10,4))\n plt.imshow(img)\n plt.show()",
"def show_points_on_img(mask,img):\n labeled, num_objects = ndi.label(mask)\n slices = ndi.find_objects(labeled)\n x, y = [], []\n for dy,dx in slices:\n x_center = (dx.start + dx.stop - 1)/2\n x.append(x_center)\n y_center = (dy.start + dy.stop - 1)/2 \n y.append(y_center)\n plt.figure()\n plt.imshow(img)\n plt.autoscale(False)\n plt.plot(x,y, \"o\")",
"def plot(self):\n pass",
"def plot_plasma(self):\n x = self.geom.x\n fig, axes = plt.subplots(1, 2, figsize=(8, 3),\n constrained_layout=True)\n # plot densities\n ax = axes[0]\n ax.plot(x, self.ne, 'b-')\n ax.plot(x, self.ni, 'r-')\n ax.legend(['E', 'Ion'])\n ax.set_xlabel('Position (m)')\n ax.set_ylabel('Density (m^-3)')\n # plot temperature\n ax = axes[1]\n ax.plot(x, self.Te, 'b-')\n ax.plot(x, self.Ti, 'r-')\n ax.legend(['Te', 'Ti'])\n ax.set_xlabel('Position (m)')\n ax.set_ylabel('Temperature (eV)')\n plt.show()",
"def show_image(dataset, domain, image_class, image_name):\n\timage_file = io.imread(os.path.join(\"data\", dataset, domain, \"images\", image_class, image_name))\n\tplt.imshow(image_file)\n\tplt.pause(0.001)\n\tplt.figure()",
"def plot(self):\n\n fig = plt.figure()\n ax = fig.add_subplot(111, projection=Axes3D.name)\n\n # TODO Use numpy to rotate esp_points matrix for faster variable access.\n ax.scatter(\n xs=[i[0][0] for i in self.esp_points],\n ys=[i[0][1] for i in self.esp_points],\n zs=[i[0][2] for i in self.esp_points],\n c=[i[1] for i in self.esp_points],\n marker='o',\n s=2,\n alpha=0.5\n )\n\n ax.scatter(\n xs=[i[0][0] for i in self.atom_points],\n ys=[i[0][1] for i in self.atom_points],\n zs=[i[0][2] for i in self.atom_points],\n c=[i[1] for i in self.atom_points],\n marker='X',\n s=100\n )\n\n plt.show()",
"def plot(self):\n\t\tself.plotOfXray().plot()",
"def plot_single_image(image):\n image = image.cpu()\n \n assert type(image) is torch.Tensor, 'Image to plot is not torch.Tensor'\n image_size = int(np.sqrt(image.shape[0]))\n image = image.view(image_size, image_size)\n \n fig = plt.imshow(image, cmap = 'gray')\n fig.axes.get_xaxis().set_visible(False)\n fig.axes.get_yaxis().set_visible(False)\n plt.show()\n plt.close('all')",
"def show():\n\tplt.show()",
"def plot(self):\n # Get data\n #print(self.file_name)\n fig, ax = plb.subplots(1,1,figsize=(18,20))\n for key,value in self.testTrend.items():\n x = np.arange(len(self.data_array))\n y = np.asarray(value)\n plb.plot(x,y, label=key)\n ax.scatter(x, y)\n for i in range(0, len(value)):\n ax.annotate(str(i), (x[i], y[i]))\n # Title\n plb.title(self.file_name)\n # Legend\n plb.legend(bbox_to_anchor=(.05, 1), loc='best', borderaxespad=0.)\n # x ticks\n plb.xticks(np.arange(min(x), max(x) + 1, 2.0))\n #plb.ylim(-250, 1)\n # Show image\n plb.show()",
"def plot_img(X: np.ndarray, **kwargs):\n kwargs.setdefault('origin', 'lower') # Sane default\n plt.imshow(X, **kwargs)",
"def fdplot(self, imx):\n fig = plt.figure()\n maxval = np.max(imx)\n ims = list(map(lambda im: [plt.imshow(np.fabs(im),norm=colors.Normalize(0.0,maxval))], imx))\n animation = anim.ArtistAnimation(fig,ims,interval=50)\n plt.show()",
"def show():\n setup()\n plt.show()",
"def show_env(self, img):\n plt.figure(1)\n plt.subplot(111)\n plt.imshow(img, interpolation=\"nearest\")\n plt.show()",
"def plot(path, subjects):\n transformToXYZmm = np.array([[-3.125, 0, 0, 81.250], [0, 3.125, 0, -115.625], [0, 0, 6, -54.000], [0, 0, 0, 1.000]])\n data = data_load.load_data(path, subjects)\n dimx = int(data[0][\"meta\"][\"dimx\"][0])\n dimy = int(data[0][\"meta\"][\"dimy\"][0])\n dimz = int(data[0][\"meta\"][\"dimz\"][0])\n coordToCol = data[0][\"meta\"][\"coordToCol\"][0][0]\n images = {}\n max_val = 0\n voxels = np.load(\"data/general_selected_500_1.npy\")\n directory = os.listdir(\"data/input/\")\n bar = pyprind.ProgBar(len(directory), title='Info extraction and Image Building')\n bar2 = pyprind.ProgBar(len(images.keys()), title='Saving Pictures')\n for file in directory:\n file_name = \"data/input/{}\".format(file)\n fh = open(file_name)\n activation_values = np.asarray(list(map(lambda x: float(x), filter(lambda x: x != '', fh.read().split(\",\")))))\n fh.close()\n plot_matrix = np.zeros((dimx, dimy, dimz))\n for x in range(dimx):\n for y in range(dimy):\n for z in range(dimz):\n indice = coordToCol[x][y][z]\n if indice != 0:\n if indice in list(voxels):\n voxel_indice = list(voxels).index(indice)\n value = activation_values[voxel_indice]\n if abs(value) > max_val:\n max_val = abs(value)\n plot_matrix[x][y][z] = value\n image = nib.Nifti1Image(plot_matrix, transformToXYZmm)\n images[file_name] = image\n bar.update(force_flush=True)\n print(bar)\n for image in images:\n plotting.plot_glass_brain(images[image], display_mode='ortho', vmax=max_val, plot_abs=False, threshold=None, colorbar=True, output_file=\"{}-wom1.png\".format(image))\n bar2.update(force_flush=True)\n print(bar2)",
"def plot(data, interactive=False):\n if interactive:\n plt.ion()\n fig = plt.figure()\n fig.canvas.draw()\n image = call_imshow(data)\n else:\n fig = plt.figure()\n image = call_imshow(data)\n plt.show()\n return fig, image",
"def plot_numpy_img(np_img):\n plt.imshow(np_img, interpolation='nearest')\n plt.show()",
"def visualize(original, s, m, l, s_pred, m_pred, l_pred):\n\tfig = plt.figure(figsize=(20, 10))\n\tplt.subplot(1,7,1)\n\tplt.title('Original image')\n\tplt.imshow(original)\n\n\tplt.subplot(1,7,2)\n\tplt.title('S image')\n\tplt.imshow(s)\n\tplt.subplot(1,7,3)\n\tplt.title('S Pred image')\n\tplt.imshow(s_pred)\n\n\tplt.subplot(1,7,4)\n\tplt.title('M image')\n\tplt.imshow(m)\n\tplt.subplot(1,7,5)\n\tplt.title('M Pred image')\n\tplt.imshow(m_pred)\n\n\tplt.subplot(1,7,6)\n\tplt.title('L image')\n\tplt.imshow(l)\n\tplt.subplot(1,7,7)\n\tplt.title('L Pred image')\n\tplt.imshow(l_pred)",
"def show(self):\n plt.show()",
"def plot_potential(self):\n imshow(self.U, extent=(self.x[0], self.x[-1], self.y[0], self.y[-1]), aspect='auto', interpolation='None')\n xlabel('x')\n ylabel('y')",
"def show(self):\n \n \n \n \n \n \n r = 4\n f, axarr = plt.subplots(r, r, figsize=(8,8))\n counter = 0\n for i in range(r):\n for j in range(r):\n temp = self.x[counter,:]\n counter += 1\n img = self.x[counter,:]\n axarr[i][j].imshow(img)\n #######################################################################\n # #\n # #\n # TODO: YOUR CODE HERE #\n # #\n # #\n #######################################################################",
"def plot(self, fname=None):\n x = np.linspace(self.bounds[0], self.bounds[-1], 200)\n y = [self.evaluate(xi) for xi in x]\n plt.figure()\n plt.plot(x, y, label='Class func')\n plt.plot(self.bounds, self.gis, 'o', label='Algorithm')\n plt.grid(color='0.7')\n plt.xlabel('Dependent Variable')\n plt.ylabel('PP Transformed Class Value')\n if fname:\n plt.savefig(fname)\n else:\n plt.show()",
"def plot_vector_as_image(image, h, w, title):\n plt.imshow(image.reshape((h, w)), cmap=plt.cm.gray)\n plt.title(title, size=12)\n plt.show()",
"def visualize_scan(self):\n fig = plt.figure()\n ax = fig.add_subplot(111, projection='3d')\n ax.scatter(self.p1_points[:, 0], self.p1_points[:, 1], self.p1_points[:, 2], c='r')\n ax.scatter(self.p2_points[:, 0], self.p2_points[:, 1], self.p2_points[:, 2], c='g')\n ax.scatter(self.p3_points[:, 0], self.p3_points[:, 1], self.p3_points[:, 2], c='b')\n ax.scatter(self.p4_points[:, 0], self.p4_points[:, 1], self.p4_points[:, 2])\n\n ax.set_xlabel('x')\n ax.set_ylabel('y')\n ax.set_zlabel('z')\n plt.show()",
"def plot_image(image):\n plt.imshow(image, cmap=\"gray\", interpolation=\"nearest\")\n plt.axis(\"off\")",
"def plot(self):\n\t\tself.plotOfIP().plot()",
"def main():\n # Initialize the Serpinski set\n print(\"==> Making serpinski set...\")\n my_serpinski = Serpinski(400, 400, 0)\n num = 8\n print(\"==> Generating\", num, \"levels of subsets :)\")\n for _ in range(9):\n my_serpinski.add_subset()\n # Draw Serpinski\n # print(\"==> Drawing the set. This might take quite some time!\\\n # Damn Inefficient!\")\n # my_serpinski.draw_me()\n\n # Initialize Coordinates\n length = 50000 # Number of random dots\n x_coord = []\n y_coord = []\n index = 0\n\n # try length particles in serp set\n print(\"==> Randomly choosing\", length, \"dots...\")\n while index < length:\n # Chech if dot in bound\n rand_y = np.random.uniform(low=400.0 - 200.0 * np.sqrt(3) / 2.0,\n high=400.0)\n # rand_x in triangle // condition //\n diff = 400.0 - rand_y\n x_diff = diff / np.sqrt(3)\n rand_x = np.random.uniform(low=400.0 - x_diff,\n high=400 + x_diff)\n\n if my_serpinski.is_bound(rand_x, rand_y):\n x_coord.append(rand_x)\n y_coord.append(rand_y)\n index += 1\n\n # Draw image using scatter\n print(\"Scattering the dots ;)\")\n plt.scatter(x_coord, y_coord, s=0.1)\n # Show image\n dpi = 600\n print(\"==> Saving to .jpg with dpi=\", dpi)\n plt.savefig(\"fractalstuff.jpg\", dpi=dpi, bbox_inches='tight')",
"def _plot(self):\r\n fig = plt.figure()\r\n\r\n # Take out second component of intensity if needed\r\n # if self._vna.isTwoComponents():\r\n # intensitySimplified = []\r\n # for i in range(len(self._intensity)):\r\n # tempSet = []\r\n # for j in range(len(self._intensity[i])):\r\n # if (j%2) == 0:\r\n # tempSet.append(self._intensity[i][j])\r\n # intensitySimplified.append(tempSet)\r\n # for i in range(len(self._frequency)):\r\n # plt.plot(self._frequency[i],intensitySimplified[i],label=('%sv' % self._voltages[i][0]))\r\n # else:\r\n for i in range(len(self._frequency)):\r\n plt.plot(self._frequency[i],self._intensity[i],label=('%sv' % self._voltages[i][0]))\r\n plt.legend(loc='upper left')\r\n fig.suptitle('Intensity-Frequency with non-Constant Voltage', fontsize=18)\r\n plt.xlabel('Frequency (Hz)', fontsize=18)\r\n plt.ylabel('Intensity (dBm)', fontsize=16)\r\n\r\n # Save plot\r\n self._saveFig()",
"def show_image(f, x, y):\n window_surface.blit(f, (x, y))",
"def showimage(image):\n mplt.figure()\n mplt.imshow(image)\n mplt.show()",
"def show():\n plt.show()",
"def show():\n plt.show()",
"def show():\n plt.show()",
"def show_image(img, figsize=(10, 10)):\n plt.figure(figsize=figsize)\n plt.imshow(img)\n plt.show()",
"def mri_point_plot(self, vcol=1):\n img = self.voxels\n points = self.point_position \n ax = []\n fig = plt.figure(figsize=(9, 8))\n # TODO make this setable in the function call\n columns = 3\n rows = 2\n\n for i in range(points.shape[0]):\n im_slice = int(np.round(points[i, vcol]))\n if vcol == 0:\n im = img[im_slice, :, :]\n elif vcol == 1:\n im = img[:, im_slice, :]\n else:\n im = img[:, :, im_slice]\n ax.append( fig.add_subplot(rows, columns, i+1))\n ax[-1].set_title(\"Image depth: \"+str(im_slice)) # set title\n plt.imshow(im)\n plot_cols = np.array([0, 1, 2])\n plot_cols = plot_cols[plot_cols != vcol]\n plt.plot(points[i, min(plot_cols)], points[i, max(plot_cols)], 'ro')\n\n plt.show()",
"def plotArt(self):\n self.isArt=True\n warr=self.ws.value(self.xarr)\n asfarr=st.interpolate(warr, self.swarr, self.sfarr, left=0.0, right=0.0)\n asfarr=asfarr*self.farr.max()/asfarr.max()\n self.fpcurve,=self.axes.plot(self.xarr,asfarr,linewidth=0.5,linestyle='-',\n marker='None',color='r')",
"def plot_image(ax, example, training=True):\n ax.grid(False) # hide grid lines\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n\n if training:\n prefix = example['metadata'][0]['image_prefix'] / \"training/image_2/\"\n else:\n prefix = example['metadata'][0]['image_prefix'] / \"testing/image_2/\"\n\n image_file = prefix / Path(f\"{example['metadata'][0]['image_idx']:06d}\" + \".png\")\n img = plt.imread(image_file)\n ax.imshow(img)",
"def plot(self, x, y, b, path=None):\n label = [\"atypical\", \"indeterminate\", \"negative\", \"typical\"]\n _, pred = self.cam_model.predict(x)\n for i in range(len(x)):\n image = x[i] if x.shape[-1] == 3 else np.squeeze(x[i], -1)\n\n fig, axs = plt.subplots(2, 2)\n for j in range(4):\n ax_x = [0, 1, 0, 1]\n ax_y = [0, 0, 1, 1]\n ax = axs[ax_x[j], ax_y[j]]\n p = np.argmax(pred[i])\n a = np.argmax(y[i])\n c = '(pa)' if j == p and p == a else '(p)' if j == p else '(a)' if j == a else ''\n ax.title.set_text(f\"{label[j]} {c}\")\n # hide axis ticks\n plt.setp(ax.get_xticklabels(), visible=False)\n plt.setp(ax.get_yticklabels(), visible=False)\n ax.tick_params(axis='both', which='both', length=0)\n # plot original image with boxes\n ax.imshow(image, cmap=\"gray\", aspect=\"equal\")\n for box in b[i]:\n ax.add_patch(Rectangle((box[\"x\"], box[\"y\"]), box[\"width\"], box[\"height\"], linewidth=1, edgecolor=\"r\", facecolor=\"None\", alpha=0.6))\n # plot CAM\n camap = self.generate(x[i], label=j, zoom=True)\n camap = ax.imshow(camap, cmap=\"coolwarm\", aspect=\"equal\", alpha=0.6)\n #cax = fig.add_axes([ax2.get_position().x1+0.01, ax2.get_position().y0,0.02, ax2.get_position().height])\n #plt.colorbar(camap, cax=cax, orientation=\"vertical\")\n if path != None: plt.savefig(path + f\"_{i}.png\", dpi=300, format=\"png\")\n plt.show()",
"def plot_harris_points(image, filtered_coords):\n plt.figure()\n plt.imshow(image)\n plt.plot([p[1] for p in filtered_coords], [p[0] for p in filtered_coords], 'r*')\n plt.axis('off')\n plt.title('harris points')\n plt.show()",
"def plotFeatures(self):\n fl=np.array(self.xp)*0.0+0.25*self.farr.max()\n self.splines=self.axes.plot(self.xp, fl , ls='', marker='|', ms=20, color='#00FF00')\n #set up the text position\n tsize=0.83\n self.ymin, self.ymax = self.axes.get_ylim()\n ppp=(self.ymax-self.ymin)/(self.arcfigure.figure.get_figheight()*self.arcfigure.figure.get_dpi())\n f=self.ymax-10*tsize*ppp\n for x,w in zip(self.xp, self.wp):\n w='%6.2f' % float(w)\n self.axes.text(x, f, w, size='small', rotation='vertical', color='#00FF00')",
"def plot(self, title='', file_name='schelling.png'):\n import matplotlib.pyplot as plt\n fig, ax = plt.subplots()\n #If you want to run the simulation with more than 7 colors, you should set agent_colors accordingly\n colors = ['b','r','g','c','m','y','k']\n for person in self.people:\n ax.scatter(\n person.home.x+0.5,\n person.home.y+0.5,\n s = 50.,\n color=colors[person.group]\n )\n ax.set_title(title, fontsize=10, fontweight='bold')\n ax.set_xlim([0, self.nx])\n ax.set_ylim([0, self.ny])\n ax.set_xticks([])\n ax.set_yticks([])\n plt.savefig(file_name)",
"def plot_psd(self):\n from .error import show_error\n from ..backend.viz_raw import \\\n _plot_topomap, _plot_matrix, _plot_all_psd\n\n if self.plotType == 'Topomap':\n try:\n _plot_topomap(self)\n except ValueError:\n show_error(\n 'No coordinates for topomap have been initialized:(')\n self.ui.selectPlotType.setCurrentIndex(0)\n\n if self.plotType == 'Matrix':\n _plot_matrix(self)\n\n if self.plotType == 'All PSD':\n _plot_all_psd(self)",
"def plot(self):\n R = self.length\n\n plt.figure()\n for ii, car in enumerate(self.cars):\n theta = self.positions[ii] + car.position\n x = R * np.cos(theta)\n y = R * np.sin(theta)\n if ii == 0:\n plt.scatter(x, y, marker='x')\n else:\n plt.scatter(x, y)\n\n plt.axis('scaled')\n lim = (-1.2 * R, 1.2 * R)\n plt.ylim(lim)\n plt.xlim(lim)\n plt.savefig('traffic_{:d}.png'.format(self.time))\n plt.close()",
"def plot_sim_imgs(self, frqs_sim_guess=None, figsize=(20, 10)):\n\n # real space coordinate data\n x = self.dx * (np.arange(self.nx) - self.nx / 2)\n y = self.dy * (np.arange(self.ny) - self.ny / 2)\n\n extent = tools.get_extent(y, x)\n\n # frequency coordinate data\n dfx = self.fx[1] - self.fx[0]\n dfy = self.fy[1] - self.fy[0]\n\n extent_ft = tools.get_extent(self.fx, self.fy)\n\n # plot FT of sim images\n figh = plt.figure(figsize=figsize)\n plt.suptitle('SIM images, real space and power spectra')\n grid = plt.GridSpec(self.nphases, self.nangles*2)\n\n # parameters for ft plot\n gamma = 0.1 # gamma for PowerNorm plot of power spectra\n\n # parameters for real space plot\n vmin = np.percentile(self.imgs.ravel(), 0.1)\n vmax = np.percentile(self.imgs.ravel(), 99.9)\n mean_int = np.mean(self.imgs, axis=(1, 2, 3))\n rel_int = mean_int / np.max(mean_int)\n\n for ii in range(self.nangles):\n for jj in range(self.nphases):\n\n # set real space image\n ax = plt.subplot(grid[jj, 2 * ii])\n ax.imshow(self.imgs[ii, jj], vmin=vmin, vmax=vmax, extent=extent, interpolation=None)\n\n if jj == 0:\n plt.title('angle %d, relative intensity=%0.3f' % (ii, rel_int[ii]))\n if ii == 0:\n plt.ylabel(\"Position (um)\")\n if jj == (self.nphases - 1):\n plt.xlabel(\"Position (um)\")\n\n # plot power spectra\n ax = plt.subplot(grid[jj, 2*ii + 1])\n\n ax.imshow(np.abs(self.imgs_ft[ii, jj]) ** 2, norm=PowerNorm(gamma=gamma), extent=extent_ft)\n circ = matplotlib.patches.Circle((0, 0), radius=self.fmax, color='k', fill=0, ls='--')\n ax.add_artist(circ)\n\n if frqs_sim_guess is not None:\n circ2 = matplotlib.patches.Circle((frqs_sim_guess[ii, 0], frqs_sim_guess[ii, 1]), radius=20 * dfx,\n color='k', fill=0, ls='-')\n ax.add_artist(circ2)\n circ3 = matplotlib.patches.Circle((-frqs_sim_guess[ii, 0], -frqs_sim_guess[ii, 1]), radius=20 * dfx,\n color='k', fill=0, ls='-')\n ax.add_artist(circ3)\n\n angle = np.angle(frqs_sim_guess[ii, 0] + 1j * frqs_sim_guess[ii, 1])\n period = 1 / np.sqrt(frqs_sim_guess[ii, 0] ** 2 + frqs_sim_guess[ii, 1] ** 2)\n\n if jj == 0:\n plt.title('%0.3fdeg, %0.3fnm' % (angle * 180 / np.pi, period))\n\n ax.set_xlim([-self.fmax, self.fmax])\n ax.set_ylim([self.fmax, -self.fmax])\n\n if jj == (self.nphases - 1):\n plt.xlabel(\"Frq (1/um)\")\n\n return figh",
"def matplotlibDisplay(img, title=\"Image\", colorFlag = 'gray'):\n plt.imshow(img, colorFlag)\n plt.title(title)\n plt.xticks([])\n plt.yticks([])\n plt.show()",
"def plot_image_sequence(self):\r\n\r\n imv = pg.ImageView()\r\n\r\n imv.show()\r\n\r\n imv.setImage(self.imageData)\r\n\r\n self.layout.addWidget(imv, 0, 0)\r\n\r\n\r\n\r\n avgImage = np.mean(self.imageData, axis=0)\r\n\r\n ima = pg.ImageView()\r\n\r\n ima.setImage(avgImage)\r\n\r\n self.layout.addWidget(ima, 1, 0)",
"def print_image(indiv,name):\n routine = gp.compile(indiv,pset)\n output = gen_beat_output(routine)\n bits = np.array(map(bitlist,output)[0:24000]).transpose()\n plt.style.use('classic')\n plt.imshow(bits,interpolation='nearest',aspect='auto',cmap=plt.get_cmap('Greys'))\n plt.savefig(name+\".png\",dpi=150)",
"def plot_image_path(image_path, log_image_path=False):\n img = load_img(image_path)\n if log_image_path:\n plt.title(image_path)\n plt.imshow(img)\n plt.axis('off')\n plt.show()",
"def show_figure(self):\n pylab.show()",
"def plot_random_generated_images(self):\n dimensions=(10, 10)\n figsize=(10, 10)\n n_samples=100\n \n (X, _), _ = self.generate_generator_prediction_samples(n_samples)\n \n self.grid_plot(X, dimensions=dimensions, figsize=figsize)",
"def plot_obs(self):\n if self.obs_im is None and self.obs_ax is None:\n fig, self.obs_ax = plt.subplots()\n self.obs_ax.set_title('Observation')\n self.obs_ax.set_xticks(())\n self.obs_ax.set_yticks(())\n self.obs_im = self.obs_ax.imshow(self.obs, cmap='gray')\n else:\n self.obs_im.set_data(self.obs)",
"def test_point_plot(self):\n clf()\n filename = 'points_plot.png'\n N = 10\n points = GeoSeries(Point(i, i) for i in xrange(N))\n ax = points.plot()\n self._compare_images(ax=ax, filename=filename)",
"def plot(self):\n self.plotsite()\n self.plotbond()\n plt.show()",
"def test_random_multi_image():\n\n shap.image_plot([np.random.randn(3, 20, 20) for i in range(3)], np.random.randn(3, 20, 20), show=False)",
"def display(array):\n plt.figure()\n plt.imshow(array)\n plt.show()",
"def plot(self):\n fx = self.fitness_functions(self.archive)\n n = len(fx[0])\n\n if n == 2:\n plt.xlabel(\"F1\")\n plt.ylabel(\"F2\")\n plt.suptitle(\"Pareto Front\")\n plt.scatter(fx[:,0], fx[:,1], label='Archive')\n plt.show()\n elif n == 3:\n plt.figure()\n ax = plt.axes(projection='3d')\n ax.scatter(fx[:, 0], fx[:, 1], fx[:, 2])\n ax.set_xlabel(\"F1\")\n ax.set_ylabel(\"F2\")\n ax.set_zlabel(\"F3\")\n plt.suptitle(\"Pareto Front of Archive\")\n plt.show()\n else:\n print(\"Cannot Print Multi-Dimensional Front greater than 3D\")",
"def get_plot(sample):\n scale = (CANVAS_DIM/PATCH_DIM)\n ego_pose = sample[0]\n map_mask = sample[2]\n\n fig, ax = plt.subplots()\n ax.set_ylim([0, CANVAS_DIM]) # set the bounds to be 10, 10\n ax.set_xlim([0, CANVAS_DIM])\n ax.imshow(map_mask[0])\n\n for vehicle in sample[1]:\n plot_vehicle(ax, vehicle, ego_pose, scale)\n\n plt.show()",
"def make_spark(pricestack):\n _x = pricestack - np.mean(pricestack)\n fig, _ax = plt.subplots(1, 1, figsize=(10, 3))\n plt.plot(_x, color='k', linewidth=6)\n plt.plot(len(_x) - 1, _x[-1], color='r', marker='o')\n\n for _, i in _ax.spines.items():\n i.set_visible(False)\n _ax.set_xticks = ([])\n _ax.set_yticks = ([])\n _ax.axhline(c='k', linewidth=4, linestyle=(0, (5, 2, 1, 2)))\n\n buf = BytesIO()\n plt.savefig(buf, format='png', dpi=17)\n buf.seek(0)\n imgspk = Image.open(buf)\n\n plt.clf()\n _ax.cla()\n plt.close(fig)\n return imgspk",
"def show_plot(points, save=False, name=\"plot\"):\n plt.figure()\n fig, ax = plt.subplots()\n # this locator puts ticks at regular intervals\n loc = ticker.MultipleLocator(base=0.2)\n ax.yaxis.set_major_locator(loc)\n plt.plot(points)\n if save:\n if not os.path.isdir(\"imgs\"):\n os.mkdir(\"imgs\")\n plt.savefig(\"imgs/\" + name + \".jpg\")\n plt.show()",
"def save_plot_as_image(self):\r\n plt.savefig(ROOT_DIR + '/presentation/images/' + self.folder + '/' + self.generated_image_name + '.png',\r\n bbox_inches='tight')",
"def plot(self, filename='model.png'):\n plot_model(self.model, to_file=filename)",
"def plot_img(img, savefig=\"test.png\", **kwargs):\n plt.figure()\n if img.ndim > 2:\n plt.imshow(cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_BGR2RGB), **kwargs)\n else:\n plt.imshow(img.astype(np.uint8), **kwargs)\n plt.axis(\"off\")\n if savefig:\n cv2.imwrite(savefig, img.astype(np.uint8))",
"def show_shot(path_to_images, name_image):\n crrt_image = misc.imread(\"./{}/{}\".format(path_to_images, name_image))\n\n plt.imshow(crrt_image)\n\n plt.draw()\n plt.pause(0.5)",
"def plot_phot_transform(params, inst_mag, cal_mag, bandpass):\n\n fig = plt.figure(2)\n\n plt.plot(cal_mag, inst_mag,'k.')\n\n plt.xlabel('Catalog magnitude')\n\n plt.ylabel('Instrumental magnitude')\n\n plt.title('Relation between instrumental and catalogue magnitudes in '+\\\n bandpass)\n\n [xmin,xmax,ymin,ymax] = plt.axis()\n\n plt.axis([xmax,xmin,ymax,ymin])\n\n plt.savefig(path.join(params['red_dir'],\n 'phot_transform_'+bandpass+'.eps'))\n\n plt.close(2)",
"def plot_cv_img(input_image): \n # change color channels order for matplotlib \n plt.imshow(cv2.cvtColor(input_image, cv2.COLOR_BGR2RGB)) \n\n # For easier view, turn off axis around image \n plt.axis('off')\n plt.show()",
"def plot(self, A = None):\r\n if A is not None: self.A = A\r\n self.im.set_data(self.A)\r\n self.ax.draw_artist(self.im)\r\n self.fig.canvas.blit(self.ax.bbox)"
] | [
"0.68390614",
"0.667332",
"0.6661213",
"0.662219",
"0.655755",
"0.655406",
"0.6526125",
"0.6526125",
"0.6526125",
"0.6519338",
"0.64991987",
"0.64458805",
"0.6437348",
"0.643601",
"0.6406592",
"0.63417786",
"0.6327013",
"0.63187444",
"0.6312867",
"0.6310806",
"0.63035756",
"0.62978315",
"0.62460405",
"0.62263805",
"0.62197894",
"0.6213917",
"0.61846524",
"0.6184388",
"0.6179067",
"0.6170574",
"0.61483645",
"0.61396897",
"0.61332476",
"0.6129002",
"0.61220676",
"0.6109591",
"0.6094141",
"0.6083547",
"0.6070591",
"0.6070088",
"0.60581434",
"0.60491556",
"0.6042744",
"0.60376614",
"0.6031594",
"0.60124785",
"0.6003811",
"0.6003094",
"0.60006934",
"0.5999655",
"0.5998243",
"0.59841454",
"0.59795636",
"0.59750676",
"0.5972523",
"0.59691733",
"0.5964624",
"0.5960006",
"0.5951938",
"0.5945934",
"0.59376717",
"0.59338784",
"0.5929707",
"0.5915173",
"0.5912721",
"0.5912721",
"0.5912721",
"0.59100926",
"0.59086394",
"0.5906128",
"0.59033126",
"0.5900286",
"0.5897591",
"0.5886689",
"0.58865786",
"0.5885044",
"0.58826154",
"0.5881233",
"0.58703184",
"0.58681434",
"0.5860439",
"0.58595186",
"0.58583105",
"0.58578694",
"0.5854598",
"0.5853223",
"0.5852427",
"0.58511347",
"0.58509636",
"0.58502096",
"0.58471304",
"0.5844103",
"0.5844085",
"0.5842731",
"0.58401173",
"0.5833452",
"0.5830231",
"0.5813183",
"0.5807831",
"0.57949007"
] | 0.6686292 | 1 |
Given an oversampled PSF (typically 0.5-1.0 mas spaxels), it calculates the Ensquared Energy of the central spaxel at a new_scale (4, 10, 20 mas). It selects a window of size new_scale and adds up the Intensity of those pixels | def ensquared_one_pix(array, pix_scale, new_scale=40, plot=True):
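    # NOTE: this function assumes numpy as np, matplotlib.pyplot as plt,
    # matplotlib.patches.Rectangle and a module-level 'pix' (the side of the
    # PSF array in pixels) are defined elsewhere in the source module.
    # Minimal usage sketch (hypothetical values):
    #   EE = ensquared_one_pix(psf, pix_scale=0.5, new_scale=20, plot=False)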
    n = int(new_scale // pix_scale)
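    # indices of the central n x n window of the oversampled array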
    minPix, maxPix = (pix + 1 - n) // 2, (pix + 1 + n) // 2
    ens = array[minPix:maxPix, minPix:maxPix]
    # print(ens.shape)
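    # the Ensquared Energy is the summed intensity inside the window
    # (a fraction of the total only if the input PSF is normalized accordingly)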
    energy = np.sum(ens)
    if plot:
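        # diagnostic view: full oversampled PSF with the window outlined, next to the extracted window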
        mapp = 'viridis'
        f, (ax1, ax2) = plt.subplots(1, 2)
        ax1 = plt.subplot(1, 2, 1)
        square = Rectangle((minPix-0.5, minPix-0.5), n, n, linestyle='--', fill=None, color='white')
        ax1.add_patch(square)
        img1 = ax1.imshow(array, cmap=mapp)
        ax1.set_title('%.1f mas pixels' % (pix_scale))
        img1.set_clim(0, 1)
        plt.colorbar(img1, ax=ax1, orientation='horizontal')
        ax2 = plt.subplot(1, 2, 2)
        img2 = ax2.imshow(ens, cmap=mapp)
        ax2.set_title('%d mas window' % new_scale)
        img2.set_clim(0, 1)
        plt.colorbar(img2, ax=ax2, orientation='horizontal')
    return energy | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _scale_psf(self, input_irf_file, config):\n\n # Find all \"sigma\" values - tells how many PSF components we have in the IRF file\n column_names = [col.name.lower() for col in input_irf_file['POINT SPREAD FUNCTION'].columns]\n sigma_columns = list(filter(lambda s: \"sigma\" in s.lower(), column_names))\n\n # --------------------------\n # Reading the PSF parameters\n self._psf = dict()\n self._psf['Elow'] = input_irf_file['POINT SPREAD FUNCTION'].data['Energ_lo'][0].copy()\n self._psf['Ehigh'] = input_irf_file['POINT SPREAD FUNCTION'].data['Energ_hi'][0].copy()\n self._psf['ThetaLow'] = input_irf_file['POINT SPREAD FUNCTION'].data['Theta_lo'][0].copy()\n self._psf['ThetaHi'] = input_irf_file['POINT SPREAD FUNCTION'].data['Theta_hi'][0].copy()\n\n for i in range(0, len(sigma_columns)):\n sigma_name = 'sigma_{:d}'.format(i + 1)\n self._psf[sigma_name] = input_irf_file['POINT SPREAD FUNCTION'].data[sigma_name][0].transpose().copy()\n\n self._psf['E'] = scipy.sqrt(self._psf['Elow'] * self._psf['Ehigh'])\n self._psf['Theta'] = (self._psf['ThetaLow'] + self._psf['ThetaHi']) / 2.0\n # --------------------------\n\n # Creating the energy-theta mesh grid\n energy, theta = scipy.meshgrid(self._psf['E'], self._psf['Theta'], indexing='ij')\n\n # ---------------------------------\n # Scaling the PSF energy dependence\n\n # Constant error function\n if config['energy_scaling']['err_func_type'] == \"constant\":\n scale_params = config['energy_scaling'][\"constant\"]\n # Constant scaling. Loop over all \"sigma\" values and scale them by the same factor.\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = scale_params['scale'] * self._psf[sigma_column]\n\n # Gradients error function\n elif config['energy_scaling']['err_func_type'] == \"gradient\":\n scale_params = config['energy_scaling'][\"gradient\"]\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column] * (\n 1 + scale_params['scale'] * gradient(scipy.log10(energy),\n scipy.log10(scale_params['range_min']),\n scipy.log10(scale_params['range_max']))\n )\n\n # Step error function\n elif config['energy_scaling']['err_func_type'] == \"step\":\n scale_params = config['energy_scaling'][\"step\"]\n break_points = list(zip(scipy.log10(scale_params['transition_pos']),\n scale_params['transition_widths']))\n\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column] * (\n 1 + scale_params['scale'] * step(scipy.log10(energy), break_points)\n )\n\n else:\n raise ValueError(\"Unknown PSF scaling function {:s}\"\n .format(config['energy_scaling']['err_func_type']))\n # ---------------------------------\n\n # ---------------------------------\n # Scaling the PSF angular dependence\n\n # Constant error function\n if config['angular_scaling']['err_func_type'] == \"constant\":\n scale_params = config['angular_scaling'][\"constant\"]\n # Constant scaling. 
Loop over all \"sigma\" values and scale them by the same factor.\n for sigma_column in sigma_columns:\n # input_irf_file['POINT SPREAD FUNCTION'].data[sigma_column] *= scale_params['scale']\n self._psf[sigma_column + '_new'] = scale_params['scale'] * self._psf[sigma_column + '_new']\n\n # Gradients error function\n elif config['angular_scaling']['err_func_type'] == \"gradient\":\n scale_params = config['angular_scaling'][\"gradient\"]\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column + '_new'] * (\n 1 + scale_params['scale'] * gradient(theta,\n scale_params['range_min'],\n scale_params['range_max'])\n )\n\n # Step error function\n elif config['angular_scaling']['err_func_type'] == \"step\":\n scale_params = config['angular_scaling'][\"step\"]\n break_points = list(zip(scale_params['transition_pos'],\n scale_params['transition_widths']))\n\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column + '_new'] * (\n 1 + scale_params['scale'] * step(theta, break_points)\n )\n\n else:\n raise ValueError(\"Unknown PSF scaling function {:s}\"\n .format(config['angular_scaling']['err_func_type']))\n # ---------------------------------\n\n # Recording the scaled PSF\n for i in range(0, len(sigma_columns)):\n sigma_name = 'sigma_{:d}'.format(i + 1)\n\n input_irf_file['POINT SPREAD FUNCTION'].data[sigma_name][0] = self._psf[sigma_name + '_new'].transpose()",
"def createIntegratedPsf(self):\n\n (wavelengths, weights) = self.filter\n for i in range(len(wavelengths)):\n\n wavelength = wavelengths[i]\n weight = weights[i]\n self.convertToOpd(wavelength) # creates self.opd\n opd = self.embedOpd()\n zf = numpy.fft.fft2(opd)\n del opd\n # Compute the amplitude squared.\n # (psf is not really the point spread function yet)\n psf = np.conjugate(zf)\n # psf will now be the point spread function, but still complex\n np.multiply(psf, zf, psf)\n del zf\n # normalize the PSF, and convert to single precision\n psf = psf.real / psf.size\n psf = psf.astype(np.float32)\n\n self.center(psf)\n\n # This describes the image scale if no resampling is done.\n cdelt_before_resampling = (wavelength * MICRONStoMETERS) / \\\n (self.D * self.oversample) * RADIANStoDEGREES\n if self.pixel_size is None:\n # we won't resample the output image\n self.cdelt = cdelt_before_resampling\n # Extract a subset.\n if self.output_size < self.npix:\n o_npix = self.output_size\n n0 = (self.npix - o_npix) // 2\n self.integrated_psf += \\\n (psf[n0:n0 + o_npix, n0:n0 + o_npix] * weight)\n else:\n self.integrated_psf += (psf * weight)\n else:\n # we'll resample to this image scale\n self.cdelt = self.pixel_size / self.oversample * ARCSECtoDEGREES\n # These three parameters are only used by mapPsf and for\n # normalizing the weight after resampling.\n self.rescale = self.cdelt / cdelt_before_resampling\n self.input_center = (self.npix + 1) // 2\n self.output_center = (self.output_size + 1) // 2\n sub_psf = np.zeros((self.output_size, self.output_size),\n dtype=np.float32)\n # Do the resampling, writing the output to sub_psf.\n ndimage.geometric_transform(psf, self.mapPsf,\n output_shape=(self.output_size, self.output_size),\n output=sub_psf, prefilter=True)\n weight = weight * self.rescale**2\n self.integrated_psf += (sub_psf * weight)\n del sub_psf\n\n if self.verbose:\n print(\"PSF for wavelength %g has been computed\" % wavelength)",
"def new_scaled_energy(run, smoother=\"pol2\"):\n get_from_ccdb(run)\n endpoint_calib = ROOT.pstags().endpoint_calib\n endpoint_energy = ROOT.pstags().endpoint_energy\n fout = open(f\"new_scaled_energy.{run}\", \"w\")\n Eps_tagm = ROOT.gROOT.FindObject(\"Epair_Etagm_fit\")\n if not Eps_tagm:\n Eps_tagm = ROOT.gROOT.FindObject(\"Epair_Etagm\")\n if not Eps_tagm:\n Eps_tagm = plot_Etagm_Epair(run)[0]\n Eps_tagm.Fit(smoother)\n for func in Eps_tagm.GetListOfFunctions():\n ntagm = Eps_tagm.GetNbinsX()\n for i in range(ntagm):\n Elow = Eps_tagm.GetXaxis().GetBinLowEdge(102-i)\n Ehigh = Eps_tagm.GetXaxis().GetBinUpEdge(102-i)\n f = [(endpoint_calib - endpoint_energy + func.Eval(E)) /\n endpoint_calib for E in (Elow, Ehigh)]\n fout.write(f\"{i+1} {f[0]} {f[1]}\\n\")\n break",
"def _scale_edisp(self, input_irf_file, config):\n\n # Reading the Energy parameters\n self._edisp = dict()\n self._edisp['Mlow'] = input_irf_file['ENERGY DISPERSION'].data['MIGRA_LO'][0].copy()\n self._edisp['Mhigh'] = input_irf_file['ENERGY DISPERSION'].data['MIGRA_HI'][0].copy()\n self._edisp['M'] = (self._edisp['Mlow'] + self._edisp['Mhigh']) / 2.0\n\n # -------------------------------------------\n # Scaling the Energy dependence\n\n # Constant error function\n if config['energy_scaling']['err_func_type'] == \"constant\":\n scaling_params = config['energy_scaling']['constant']['scale']\n self._edisp['Mhigh_new'] = self._edisp['Mhigh'] * (scaling_params)\n self._edisp['Mlow_new'] = self._edisp['Mlow'] * (scaling_params)\n\n # Gradients error function\n elif config['energy_scaling']['err_func_type'] == \"gradient\":\n scaling_params = config['energy_scaling']['gradient']\n self._edisp['Mhigh_new'] = self._edisp['Mhigh'] * (\n 1. + scaling_params['scale'] * gradient(scipy.log10(self._edisp['Mhigh']),\n scipy.log10(scaling_params['range_min']),\n scipy.log10(scaling_params['range_max'])) \n )\n self._edisp['Mlow_new'] = self._edisp['Mlow'] * (\n 1. + scaling_params['scale'] * gradient(scipy.log10(self._edisp['Mlow']),\n scipy.log10(scaling_params['range_min']),\n scipy.log10(scaling_params['range_max'])) \n )\n # Step error function\n elif config['energy_scaling']['err_func_type'] == \"step\":\n scaling_params = config['energy_scaling']['step']\n break_points = list(zip(scipy.log10(scaling_params['transition_pos']),\n scaling_params['transition_widths']))\n self._edisp['Mhigh_new'] = self._edisp['Mhigh']* (\n 1 + scaling_params['scale'] * step(scipy.log10(self._edisp['Mhigh']), break_points)\n )\n self._edisp['Mlow_new'] = self._edisp['Mlow']* (\n 1 + scaling_params['scale'] * step(scipy.log10(self._edisp['Mlow']), break_points)\n )\n else:\n raise ValueError(\"Edisp energy scaling: unknown scaling function type '{:s}'\"\n .format(config['energy_scaling']['err_func_type'])\n )\n # ------------------------------------------\n # Recording the scaled variables\n input_irf_file['ENERGY DISPERSION'].data['MIGRA_HI'][0] = self._edisp['Mhigh_new']\n input_irf_file['ENERGY DISPERSION'].data['MIGRA_LO'][0] = self._edisp['Mlow_new']\n self._edisp['M_new'] = (self._edisp['Mlow_new'] + self._edisp['Mhigh_new']) / 2.0",
"def generate_fgs_fsw_coefficients(siaf=None, verbose=False, scale=0.06738281367):\n if siaf is None:\n siaf = pysiaf.Siaf('fgs')\n\n instrument = 'FGS'\n\n pre_delivery_dir = os.path.join(JWST_DELIVERY_DATA_ROOT, instrument)\n if not os.path.isdir(pre_delivery_dir):\n os.makedirs(pre_delivery_dir)\n\n for aperture_name in ['FGS1_FULL_OSS', 'FGS2_FULL_OSS']:\n\n aperture = siaf[aperture_name]\n\n # center_offset_x = 1023.5\n # center_offset_y = 1023.5\n center_offset_x = aperture.XSciRef - 1.\n center_offset_y = aperture.YSciRef - 1.\n\n if verbose:\n print('External scale {}'.format(scale))\n print(aperture.get_polynomial_scales())\n\n # get SIAF coefficients\n coefficients = aperture.get_polynomial_coefficients()\n\n ar = coefficients['Sci2IdlX']\n br = coefficients['Sci2IdlY']\n cr = coefficients['Idl2SciX']\n dr = coefficients['Idl2SciY']\n\n a_fsw, b_fsw, c_fsw, d_fsw = polynomial.rescale(ar, br, cr, dr, 1. / scale)\n factor = -1.\n\n if 'FGS1' in aperture_name:\n b_fsw *= -1\n c_fsw = polynomial.flip_y(c_fsw)\n d_fsw = polynomial.flip_y(d_fsw)\n\n a_fsw = polynomial.shift_coefficients(a_fsw, factor * center_offset_x,\n factor * center_offset_y)\n b_fsw = polynomial.shift_coefficients(b_fsw, factor * center_offset_x,\n factor * center_offset_y)\n c_fsw = polynomial.shift_coefficients(c_fsw, factor * center_offset_x,\n factor * center_offset_y)\n d_fsw = polynomial.shift_coefficients(d_fsw, factor * center_offset_x,\n factor * center_offset_y)\n\n a_fsw[0] += center_offset_x\n b_fsw[0] += center_offset_y\n c_fsw[0] += center_offset_x\n d_fsw[0] += center_offset_y\n\n # print FSW coefficients to screen\n fsw_coefficients = Table((c_fsw, d_fsw, a_fsw, b_fsw), names=(\n 'IDEALPTOREALPXCOE', 'IDEALPTOREALPYCOE', 'REALPTOIDEALPXCOE', 'REALPTOIDEALPYCOE'))\n if verbose:\n fsw_coefficients.pprint()\n\n table = Table(names=('parameter_name', 'value'), dtype=(object, float))\n table.add_row(['XOFFSET', center_offset_x])\n table.add_row(['YOFFSET', center_offset_y])\n table.add_row(['PLATESCALE', scale])\n for colname in fsw_coefficients.colnames:\n for i in range(len(fsw_coefficients[colname])):\n table.add_row(['{}_{}'.format(colname, i), fsw_coefficients[colname][i]])\n table['parameter_name'] = np.array(table['parameter_name']).astype(str)\n\n # write to file\n fsw_distortion_file = os.path.join(pre_delivery_dir, 'ifgs{}_distortion_tbl.txt'.format(aperture_name[3]))\n comments = []\n comments.append('FGS distortion coefficients for FSW')\n comments.append('')\n comments.append('Derived from SIAF distortion coefficients.')\n comments.append('')\n comments.append('Generated {} {}'.format(timestamp.isot, timestamp.scale))\n comments.append('by {}'.format(username))\n comments.append('')\n table.meta['comments'] = comments\n formats={'parameter_name': '%-20s', 'value': '%+2.6e'}\n table.write(fsw_distortion_file, format='ascii.fixed_width',\n delimiter=',', delimiter_pad=' ', bookend=False,\n overwrite=True, formats=formats)",
"def scaling():\n \n for i in range(cfg.nfea):\n dm = 0\n var = 0\n for j in range(cfg.ntrain):\n dm += cfg.a[j,i]\n dm = dm/cfg.ntrain\n \n for j in range(cfg.ntrain):\n var += (cfg.a[j,i]-dm)**2\n\n var = var/cfg.ntrain\n var = np.sqrt(var)\n \n if var >= 10**(-5):\n cfg.clin[i] = 1.0/var \n cfg.dlin[i] = -dm/var \n \n else: \n if np.abs(dm)<=1.0:\n cfg.clin[i] = 1.0\n cfg.dlin[i] = 0.0 \n else: \n cfg.clin[i] = 1.0/dm\n cfg.dlin[i] = 0.0 \n \n for j in range(cfg.ntrain):\n cfg.a_scaled[j,i] = cfg.clin[i]*cfg.a[j,i] + cfg.dlin[i]\n \n return",
"def get_psf_scale_map(self):\n\n scale_map = dict()\n\n scale_map['E_edges'] = scipy.concatenate((self._psf['Elow'], [self._psf['Ehigh'][-1]]))\n scale_map['Theta_edges'] = scipy.concatenate((self._psf['ThetaLow'], [self._psf['ThetaHi'][-1]]))\n\n # Find all \"sigma\" values - tells how many PSF components we have in the IRF file\n column_names = self._psf.keys()\n sigma_columns = list(filter(lambda s: (\"sigma\" in s.lower()) and not (\"new\" in s.lower()),\n column_names))\n\n for sigma_column in sigma_columns:\n # Avoiding division by zero\n can_divide = self._psf[sigma_column] > 0\n\n scale_map[sigma_column] = scipy.zeros_like(self._psf[sigma_column])\n scale_map[sigma_column][can_divide] = self._psf[sigma_column + '_new'][can_divide] / self._psf[sigma_column][can_divide]\n\n wh_nan = scipy.where(scipy.isnan(scale_map[sigma_column]))\n scale_map[sigma_column][wh_nan] = 0\n scale_map[sigma_column] -= 1\n\n return scale_map",
"def scale(self, sf):\n self.scale(sf, sf)",
"def reScaleLandsat(self,img):\n \n\t\tthermalBand = ee.List(['thermal'])\n\t\tthermal = ee.Image(img).select(thermalBand).multiply(10)\n \n\t\totherBands = ee.Image(img).bandNames().removeAll(thermalBand)\n\t\tscaled = ee.Image(img).select(otherBands).divide(0.0001)\n \n\t\timage = ee.Image(scaled.addBands(thermal)).int16()\n \n\t\treturn image.copyProperties(img)",
"def get_scale():\r\n\r\n \r\n return 0.5",
"def _eta_sfr_scaling(self,x,q):\n i = self.enum[q]\n A = self.scaling_params['A'][i]\n b = self.scaling_params['b'][i]\n return A*x**b",
"def finalSES(mat, C):\n\tscaleToMin, medianScore, maxScore = SES(mat, C)\n\tprint(f\"Background Score: {scaleToMin} \\tMax Score: {maxScore} \\tMedian Score: {medianScore}\")\n\treturn scaling(mat, medianScore)",
"def scale_psf_fluxes(frame, psf):\n scale_factor = (max_flux(frame) / max_flux(psf))\n return psf.profile * scale_factor, psf.fluxes * scale_factor",
"def s_multiplier(self):\n return 4 * np.pi * (self.bins[:, 1]/2)**2",
"def scale(self):",
"def gauss_seeing(npix = None,fwhm=None,e1=None,e2=None,scale=scale):\n fwhm = fwhm/scale\n M20 = 2.*(fwhm/2.35482)**2\n row,col = np.mgrid[-npix/2:npix/2,-npix/2:npix/2]\n rowc = row.mean()\n colc = col.mean()\n Mcc = 0.5*M20*(1+e1)\n Mrc = 0.5*e2*M20\n Mrr = 0.5*M20*(1-e1)\n rho = Mrc/np.sqrt(Mcc*Mrr)\n img = np.exp(-0.5/(1-rho**2)*(row**2/Mrr + col**2/Mcc - 2*rho*row*col/np.sqrt(Mrr*Mcc)))\n res = img/img.sum()\n return res",
"def run(self):\n #calculate platescale of first input image\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.cd)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.pc)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n pscale = self.datain[0].header['PIXSCAL']\n #filtering out images which are too far away from the others\n #passing images added to a list of (image, WCS) tuples\n '''\n image_centers = []\n for f in self.datain:\n image_centers.append((f.header['CRVAL1'], f.header['CRVAL2']))\n filtered_datain = []\n dist_list = [[[0]*(len(image_centers)-1)]*len(image_centers)]\n for i in range(len(image_centers)):\n for j in range(len(image_centers)-1):\n dist_list[i][j+1] = np.sqrt((image_)**2+()**2)\n '''\n #calculations necessary for updating wcs information\n px = []\n py = []\n \n #in order to avoid NaN interactions, creating weight map\n weights=[]\n for f in self.datain:\n weights.append((np.where(np.isnan(f.image) == True, 0, 1)))\n \n for f in self.datain:\n px.extend(wcs.WCS(f.header).calc_footprint()[:,0])\n py.extend(wcs.WCS(f.header).calc_footprint()[:,1])\n x0 = (max(px)+min(px))/2.\n y0 = (max(py)+min(py))/2.\n sx = (max(px)-min(px))*np.cos(y0/180*np.pi) # arcsec\n sy = (max(py)-min(py)) # arcsec\n size = (sx*3600+self.getarg('pad')*2, sy*3600+self.getarg('pad')*2)\n xpix = size[0]//pscale\n ypix = size[1]//pscale\n cdelt = [pscale/3600.]*2\n \n #create self.dataout and give it a copy of an input's header\n self.dataout = DataFits(config = self.config)\n self.dataout.header = self.datain[0].header.copy()\n \n #update header wcs information\n self.log.info('Creating new WCS header')\n \n self.dataout.header['CRPIX1'] = xpix/2\n self.dataout.header['CRPIX2'] = ypix/2\n self.dataout.header['CRVAL1'] = x0\n self.dataout.header['CRVAL2'] = y0\n self.dataout.header['CD1_1'] = -cdelt[0]\n self.dataout.header['CD1_2'] = self.dataout.header['CD2_1'] = 0.\n self.dataout.header['CD2_2'] = cdelt[1]\n self.dataout.header['NAXIS1'] = int(xpix)\n self.dataout.header['NAXIS2'] = int(ypix)\n self.dataout.header['CTYPE1'] = 'RA---TAN-SIP'\n self.dataout.header['CTYPE2'] = 'DEC--TAN-SIP'\n self.dataout.header['RADESYS'] = 'ICRS'\n self.dataout.header['EQUINOX'] = 2000\n self.dataout.header['LATPOLE'] = self.datain[0].header['CRVAL2']\n self.dataout.header['LONPOLE'] = 180\n self.dataout.header['PIXASEC'] = pscale\n \n theta_rad = np.deg2rad(self.getarg('outangle'))\n rot_matrix = np.array([[np.cos(theta_rad), -np.sin(theta_rad)], \n [np.sin(theta_rad), np.cos(theta_rad)]])\n rot_cd = np.dot(rot_matrix, np.array([[self.dataout.header['CD1_1'], 0.],[0., self.dataout.header['CD2_2']]]))\n for i in [0,1]:\n for j in [0,1]:\n self.dataout.header['CD{0:d}_{1:d}'.format(i+1, j+1)] = rot_cd[i,j]\n \n #check drizzle arguments\n if self.getarg('kernel') == 'smoothing':\n kernel = 'lanczos3'\n elif self.getarg('kernel') in ['square', 'point', 'gaussian', 'tophat']:\n kernel = self.getarg('kernel')\n else:\n self.log.error('Kernel name not recognized, using default')\n kernel = 'square'\n if self.getarg('drizzleweights') == 'uniform':\n driz_wt = ''\n elif self.getarg('drizzleweights') in ['exptime', 'expsq']:\n driz_wt = self.getarg('drizzleweights')\n else:\n self.log.error('Drizzle weighting not recognized, using default')\n driz_wt = ''\n \n #create drizzle object and add input images\n fullwcs = wcs.WCS(self.dataout.header)\n self.log.info('Starting drizzle')\n driz = drz.Drizzle(outwcs = fullwcs, 
pixfrac=self.getarg('pixfrac'), \\\n kernel=kernel, fillval='10000', wt_scl=driz_wt)\n for i,f in enumerate(self.datain):\n self.log.info('Adding %s to drizzle stack' % f.filename)\n driz.add_image(f.imgdata[0], wcs.WCS(f.header), inwht=weights[i])\n \n try:\n fillval=float(self.getarg('fillval'))\n except:\n fillval=np.nan\n self.log.error('Fillvalue not recognized or missing, using default')\n \n #creates output fits file from drizzle output\n self.dataout.imageset(np.where(driz.outsci == 10000, fillval, driz.outsci))\n self.dataout.imageset(driz.outwht,'OutWeight', self.dataout.header)\n self.dataout.filename = self.datain[0].filename\n\n #add history\n self.dataout.setheadval('HISTORY','Coadd: %d files combined with %s kernel, pixfrac %f at %f times resolution' \\\n % (len(self.datain), kernel, self.getarg('pixfrac'), self.getarg('resolution')))",
"def get_scale_freq():\n return sf / 2 / (num_freq-1)",
"def scaleLandsat(self,img):\n\t\tthermal = img.select(ee.List(['thermal'])).multiply(0.1)\n\t\tscaled = ee.Image(img).select(self.env.divideBands).multiply(ee.Number(0.0001))\n\t\t\n\t\treturn img.select([]).addBands(scaled).addBands(thermal)",
"def fit_ePSF(self, sci, center=None, origin=[0,0], ivar=1, N=7, \n filter='F140W', tol=1.e-4, guess=None, get_extended=False):\n from scipy.optimize import minimize\n \n sh = sci.shape\n if center is None:\n y0, x0 = np.array(sh)/2.-1\n else:\n x0, y0 = center\n \n xd = x0+origin[1]\n yd = y0+origin[0]\n \n xc, yc = int(x0), int(y0)\n \n psf_xy = self.get_at_position(x=xd, y=yd, filter=filter)\n \n yp, xp = np.indices(sh)\n \n if guess is None:\n if np.isscalar(ivar):\n ix = np.argmax(sci.flatten())\n else:\n ix = np.argmax((sci*(ivar > 0)).flatten())\n \n xguess = xp.flatten()[ix]\n yguess = yp.flatten()[ix]\n else:\n xguess, yguess = guess\n \n guess = [sci[yc-N:yc+N, xc-N:xc+N].sum()/psf_xy.sum(), xguess, yguess, 0, 0, 0, 0]\n sly = slice(yc-N, yc+N); slx = slice(xc-N, xc+N)\n sly = slice(yguess-N, yguess+N); slx = slice(xguess-N, xguess+N)\n \n if get_extended:\n extended_data = self.extended_epsf[filter]\n else:\n extended_data = None\n \n args = (self, psf_xy, sci[sly, slx], ivar[sly, slx], xp[sly, slx], yp[sly, slx], extended_data, 'chi2')\n \n out = minimize(self.objective_epsf, guess, args=args, method='Powell', tol=tol)\n \n psf_params = out.x\n psf_params[1] -= x0\n psf_params[2] -= y0\n \n return psf_params\n \n # dx = xp-psf_params[1]\n # dy = yp-psf_params[2]\n # output_psf = self.eval_ePSF(psf_xy, dx, dy)*psf_params[0]\n # \n # return output_psf, psf_params",
"def shear_est(self, gal_image, psf_image, noise=None, F=False):\n # gal_ps = self.pow_spec(gal_image)\n gal_ps = gal_image\n # gal_ps = hk_tool_box.smooth(gal_ps,self.size)\n if noise is not None:\n nbg = self.pow_spec(noise)\n self.flux2 = numpy.sqrt(gal_ps[int(self.size/2), int(self.size/2)]/numpy.sum(self.rim*gal_ps)*numpy.sum(self.rim))\n # nbg = hk_tool_box.smooth(nbg,self.size)\n # rim = self.border(2, size)\n # n = numpy.sum(rim)\n # gal_pn = numpy.sum(gal_ps*rim)/n # the Possion noise of galaxy image\n # nbg_pn = numpy.sum(nbg*rim)/n # the Possion noise of background noise image\n gal_ps = gal_ps - nbg# + nbg_pn - gal_pn\n\n if F:\n psf_ps = psf_image\n else:\n psf_ps = self.pow_spec(psf_image)\n # self.get_radius_new(psf_ps, 2)\n wb, beta = self.wbeta(self.hlr)\n maxi = numpy.max(psf_ps)\n idx = psf_ps < maxi / 100000.\n wb[idx] = 0\n psf_ps[idx] = 1.\n tk = wb/psf_ps * gal_ps\n\n # ky, kx = self.ky, self.kx\n # #\n # kx2 = kx*kx\n # ky2 = ky*ky\n # kxy = kx*ky\n # k2 = kx2 + ky2\n # k4 = k2*k2\n # mn1 = (-0.5)*(kx2 - ky2) # (-0.5)*(kx**2 - ky**2)\n # mn2 = -kxy # -kx*ky\n # mn3 = k2 - 0.5*beta**2*k4 # kx**2 + ky**2 - 0.5*beta**2*(kx**2 + ky**2)**2\n # mn4 = k4 - 8*kx2*ky2 # kx**4 - 6*kx**2*ky**2 + ky**4\n # mn5 = kxy*(kx2 - ky2) # kx**3*ky - kx*ky**3\n\n # mn1 = self.mn1\n # mn2 = self.mn2\n mn3 = self.k2 - 0.5*beta**2*self.k4\n # mn4 = self.mn4\n # mn5 = self.mn5\n\n mg1 = numpy.sum(self.mn1 * tk)*self.alpha\n mg2 = numpy.sum(self.mn2 * tk)*self.alpha\n mn = numpy.sum(mn3 * tk)*self.alpha\n mu = numpy.sum(self.mn4 * tk)*(-0.5*beta**2)*self.alpha\n mv = numpy.sum(self.mn5 * tk)*(-2.*beta**2)*self.alpha\n\n return mg1, mg2, mn, mu, mv",
"def ellipse_sbprofile(ellipsefit, minerr=0.0, snrmin=1.0, sma_not_radius=False,\n cut_on_cog=False, sdss=False, linear=False):\n sbprofile = dict()\n bands = ellipsefit['bands']\n if 'refpixscale' in ellipsefit.keys():\n pixscale = ellipsefit['refpixscale']\n else:\n pixscale = ellipsefit['pixscale']\n eps = ellipsefit['eps_moment']\n if 'redshift' in ellipsefit.keys():\n sbprofile['redshift'] = ellipsefit['redshift'] \n \n for filt in bands:\n psfkey = 'psfsize_{}'.format(filt.lower())\n if psfkey in ellipsefit.keys():\n sbprofile[psfkey] = ellipsefit[psfkey]\n\n sbprofile['minerr'] = minerr\n sbprofile['smaunit'] = 'pixels'\n sbprofile['radiusunit'] = 'arcsec'\n\n # semi-major axis and circularized radius\n #sbprofile['sma'] = ellipsefit[bands[0]].sma * pixscale # [arcsec]\n\n for filt in bands:\n #area = ellipsefit[filt].sarea[indx] * pixscale**2\n\n sma = np.atleast_1d(ellipsefit['sma_{}'.format(filt.lower())]) # semi-major axis [pixels]\n sb = np.atleast_1d(ellipsefit['intens_{}'.format(filt.lower())]) # [nanomaggies/arcsec2]\n sberr = np.atleast_1d(np.sqrt(ellipsefit['intens_err_{}'.format(filt.lower())]**2 + (0.4 * np.log(10) * sb * minerr)**2))\n \n if sma_not_radius:\n radius = sma * pixscale # [arcsec]\n else:\n radius = sma * np.sqrt(1 - eps) * pixscale # circularized radius [arcsec]\n\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n if linear:\n keep = np.isfinite(sb)\n else:\n keep = np.isfinite(sb) * ((sb / sberr) > snrmin)\n #if filt == 'FUV':\n # pdb.set_trace()\n \n if cut_on_cog:\n keep *= (ellipsefit['sma_{}'.format(filt.lower())] * pixscale) <= np.max(ellipsefit['cog_sma_{}'.format(filt.lower())])\n keep = np.where(keep)[0]\n \n sbprofile['keep_{}'.format(filt.lower())] = keep\n\n if len(keep) == 0 or sma[0] == -1:\n sbprofile['sma_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [pixels]\n sbprofile['radius_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [arcsec]\n sbprofile['mu_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [nanomaggies/arcsec2]\n sbprofile['muerr_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [nanomaggies/arcsec2]\n else:\n sbprofile['sma_{}'.format(filt.lower())] = sma[keep] # [pixels]\n sbprofile['radius_{}'.format(filt.lower())] = radius[keep] # [arcsec]\n if linear:\n sbprofile['mu_{}'.format(filt.lower())] = sb[keep] # [nanomaggies/arcsec2]\n sbprofile['muerr_{}'.format(filt.lower())] = sberr[keep] # [nanomaggies/arcsec2]\n continue\n else:\n sbprofile['mu_{}'.format(filt.lower())] = 22.5 - 2.5 * np.log10(sb[keep]) # [mag/arcsec2]\n sbprofile['muerr_{}'.format(filt.lower())] = 2.5 * sberr[keep] / sb[keep] / np.log(10) # [mag/arcsec2]\n\n #sbprofile[filt] = 22.5 - 2.5 * np.log10(ellipsefit[filt].intens)\n #sbprofile['mu_{}_err'.format(filt.lower())] = 2.5 * ellipsefit[filt].int_err / \\\n # ellipsefit[filt].intens / np.log(10)\n #sbprofile['mu_{}_err'.format(filt.lower())] = np.sqrt(sbprofile['mu_{}_err'.format(filt.lower())]**2 + minerr**2)\n\n # Just for the plot use a minimum uncertainty\n #sbprofile['{}_err'.format(filt.lower())][sbprofile['{}_err'.format(filt.lower())] < minerr] = minerr\n\n if 'g' in bands and 'r' in bands and 'z' in bands:\n radius_gr, indx_g, indx_r = np.intersect1d(sbprofile['radius_g'], sbprofile['radius_r'], return_indices=True)\n sbprofile['gr'] = sbprofile['mu_g'][indx_g] - sbprofile['mu_r'][indx_r]\n sbprofile['gr_err'] = np.sqrt(sbprofile['muerr_g'][indx_g]**2 + sbprofile['muerr_r'][indx_r]**2)\n sbprofile['radius_gr'] = radius_gr\n\n radius_rz, 
indx_r, indx_z = np.intersect1d(sbprofile['radius_r'], sbprofile['radius_z'], return_indices=True)\n sbprofile['rz'] = sbprofile['mu_r'][indx_r] - sbprofile['mu_z'][indx_z]\n sbprofile['rz_err'] = np.sqrt(sbprofile['muerr_r'][indx_r]**2 + sbprofile['muerr_z'][indx_z]**2)\n sbprofile['radius_rz'] = radius_rz\n \n # SDSS\n if sdss and 'g' in bands and 'r' in bands and 'i' in bands:\n radius_gr, indx_g, indx_r = np.intersect1d(sbprofile['radius_g'], sbprofile['radius_r'], return_indices=True)\n sbprofile['gr'] = sbprofile['mu_g'][indx_g] - sbprofile['mu_r'][indx_r]\n sbprofile['gr_err'] = np.sqrt(sbprofile['muerr_g'][indx_g]**2 + sbprofile['muerr_r'][indx_r]**2)\n sbprofile['radius_gr'] = radius_gr\n\n radius_ri, indx_r, indx_i = np.intersect1d(sbprofile['radius_r'], sbprofile['radius_i'], return_indices=True)\n sbprofile['ri'] = sbprofile['mu_r'][indx_r] - sbprofile['mu_i'][indx_i]\n sbprofile['ri_err'] = np.sqrt(sbprofile['muerr_r'][indx_r]**2 + sbprofile['muerr_i'][indx_i]**2)\n sbprofile['radius_ri'] = radius_ri\n \n # Just for the plot use a minimum uncertainty\n #sbprofile['gr_err'][sbprofile['gr_err'] < minerr] = minerr\n #sbprofile['rz_err'][sbprofile['rz_err'] < minerr] = minerr\n\n # # Add the effective wavelength of each bandpass, although this needs to take\n # # into account the DECaLS vs BASS/MzLS filter curves.\n # from speclite import filters\n # filt = filters.load_filters('decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1', 'wise2010-W2')\n # for ii, band in enumerate(('g', 'r', 'z', 'W1', 'W2')):\n # sbprofile.update({'{}_wave_eff'.format(band): filt.effective_wavelengths[ii].value})\n\n return sbprofile",
"def get_edisp_scale_map(self):\n \n scale_map = dict()\n\n scale_map['M_edges'] = scipy.concatenate((self._edisp['Mlow'], [self._edisp['Mhigh'][-1]]))\n scale_map['M_edges_new'] = scipy.concatenate((self._edisp['Mlow_new'], [self._edisp['Mhigh_new'][-1]]))\n\n #can_divide = self._edisp['M'] > 0\n #scale_map['Map'] = scipy.zeros_like(scale_map['M_edges'])\n #scale_map['Map'][can_divide] = self._edisp['M_new'][can_divide]/self._edisp['M'][can_divide]\n #wh_nan = scipy.where(scipy.isnan(scale_map['Map']))\n #scale_map['Map'][wh_nan] = 0\n #scale_map['Map'] -= 1 \n\n return scale_map",
"def get_scale_parameter(self):\n\n shape_in_gamma_func = float(1 + (1 / self._shape_parameter))\n gamma_func = special.gamma(shape_in_gamma_func)\n self._scale_parameter = self._mean_fire_recurrence / gamma_func",
"def scale(original_train, new_train):\n # find magnitude original training data\n o_mag = np.linalg.norm(np.stack(original_train[:,1]))\n # find magnitude new data\n n_mag = np.linalg.norm(np.stack(new_train[:,1]))\n # scale new data\n scale = o_mag / n_mag\n return scale",
"def analysis_function_ensquared(system, wavelength_idx, surface, config, px, py, box_size):\n\n det_pix = 15e-3 # Size of the detector pixel [mm]\n\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # First of all, we need to find the Surface Number for the IMAGE SLICER \"Image Plane\"\n N_surfaces = system.LDE.NumberOfSurfaces\n surface_names = {} # A dictionary of surface number -> surface comment\n for k in np.arange(1, N_surfaces):\n surface_names[k] = system.LDE.GetSurfaceAt(k).Comment\n # find the Slicer surface number\n try:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('Image Plane')]\n except ValueError:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('IFU SRM FP')]\n slicer_surface = slicer_num\n # slicer = system.LDE.GetSurfaceAt(slicer_num)\n\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n N_fields = sysField.NumberOfFields\n N_waves = len(wavelength_idx)\n\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(N_fields)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(N_fields)])\n\n # Use the Field Point at the centre of the Slice\n fx, fy = sysField.GetField(2).X, sysField.GetField(2).Y\n hx, hy = fx / X_MAX, fy / Y_MAX # Normalized field coordinates (hx, hy)\n obj_xy = np.array([fx, fy])\n\n N_pupil = px.shape[0] # Number of rays in the Pupil for a given field point and wavelength\n N_rays = N_waves * N_pupil\n\n EE = np.empty(N_waves)\n sli_foc_xy = np.empty((N_waves, 2))\n det_foc_xy = np.empty((N_waves, 2))\n\n slicer_xy = np.empty((N_waves, N_pupil, 2))\n slicer_xy[:] = np.nan\n detector_xy = np.empty((N_waves, N_pupil, 2))\n detector_xy[:] = np.nan\n\n # (1) Run the raytrace up to the IMAGE SLICER\n raytrace = system.Tools.OpenBatchRayTrace()\n # remember to specify the surface to which you are tracing!\n rays_slicer = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, slicer_surface)\n\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n for (p_x, p_y) in zip(px, py): # Add the ray to the RayTrace\n rays_slicer.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n rays_slicer.StartReadingResults()\n checksum_slicer = 0\n for k in range(N_rays): # Get Raytrace results at the Image Slicer\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n # print(i_wave, j_pupil)\n output = rays_slicer.ReadNextResult()\n if output[2] == 0 and output[3] == 0:\n slicer_xy[i_wave, j_pupil, 0] = output[4]\n slicer_xy[i_wave, j_pupil, 1] = output[5]\n checksum_slicer += 1\n # this might have to change. 
We assume no vignetting should occur before the slicer\n # but for the MC this might happen\n if output[2] == 0 and output[3] != 0:\n vignetting_code = output[3]\n vignetting_surface = system.LDE.GetSurfaceAt(vignetting_code).Comment\n print(\"\\nConfig #%d\" % config)\n print(\"Vignetting at surface #%d: %s\" % (vignetting_code, vignetting_surface))\n\n if checksum_slicer < N_rays:\n raise ValueError('Some rays were lost before the Image Slicer')\n\n rays_slicer.ClearData()\n\n # Count how many rays fall inside a +- 1 mm window in Y, wrt the centroid\n slicer_cent_x = np.nanmean(slicer_xy[:, :, 0], axis=1)\n slicer_cent_y = np.nanmean(slicer_xy[:, :, 1], axis=1)\n sli_foc_xy[:, 0] = slicer_cent_x\n sli_foc_xy[:, 1] = slicer_cent_y\n\n # print(slicer_cent_y)\n below_slicer = slicer_xy[:, :, 1] < slicer_cent_y[:, np.newaxis] + 1.0 * box_size / 2\n above_slicer = slicer_xy[:, :, 1] > slicer_cent_y[:, np.newaxis] - 1.0 * box_size / 2\n inside_slicer = (np.logical_and(below_slicer, above_slicer))\n # print(inside_slicer[0, :10])\n\n # Now, for each wavelength, we calculate which rays fulfil the Image Slicer conditions\n index_valid_slicer = [np.argwhere(inside_slicer[i, :] == True)[:, 0] for i in range(N_waves)]\n # print(index_valid_slicer[1][:10])\n # print(index_valid_slicer[2][:10])\n\n # (2) Run the raytrace up to the DETECTOR\n # For speed, we re-use the same Raytrace, just define new rays!\n # raytrace_det = system.Tools.OpenBatchRayTrace()\n # Detector is always the last surface\n detector_surface = system.LDE.NumberOfSurfaces - 1\n rays_detector = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, detector_surface)\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n for (p_x, p_y) in zip(px, py):\n rays_detector.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n\n rays_detector.StartReadingResults()\n checksum_detector = 0\n # index_valid_detector = [] # Valid means they make it to the detector even if vignetted at the Slicer\n vignetted = []\n index_vignetted = []\n index_valid_detector = np.empty((N_waves, N_pupil))\n index_valid_detector[:] = np.nan\n for k in range(N_rays): # Get Raytrace results at the Detector\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n output = rays_detector.ReadNextResult()\n if output[2] == 0 and output[3] == 0: # ErrorCode & VignetteCode\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n\n elif output[2] == 0 and output[3] != 0:\n # Some rays are vignetted\n vignetted.append([output[4], output[5]])\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n index_vignetted.append(k)\n\n # index_valid_detector = np.array(index_valid_detector)\n # # print(index_valid_detector.shape)\n # # print(index_valid_detector)\n # index_valid_detector = index_valid_detector.reshape((N_waves, N_pupil))\n # # print(index_valid_detector.shape)\n\n rays_detector.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n\n # (3) Calculate the ENSQUARED ENERGY\n # We only count the rays that where inside the slicer to begin with and the ones that make it to the detector\n for i_wave in range(N_waves):\n valid_both = []\n for k in range(N_pupil):\n # print(index_valid_detector[i_wave])\n if k in index_valid_slicer[i_wave] and k in 
index_valid_detector[i_wave]:\n valid_both.append(k)\n\n valid_det_x = detector_xy[i_wave, :, 0][valid_both]\n valid_det_y = detector_xy[i_wave, :, 1][valid_both]\n\n # Now, out of the VALID rays, we calculate which detector rays fall inside a 2x pixel box along X\n dcx = np.mean(valid_det_x) # Detector Centroid X\n dcy = np.mean(valid_det_y)\n det_foc_xy[i_wave] = [dcx, dcy]\n\n left_detector = valid_det_x < dcx + det_pix * box_size / 2\n right_detector = valid_det_x > dcx - det_pix * box_size / 2\n inside_detector = (np.logical_and(left_detector, right_detector))\n total_detector = np.sum(inside_detector)\n ensq = total_detector / N_pupil\n # print(ensq)\n EE[i_wave] = ensq * 0.98\n\n # SHOW THIS in the methodology\n\n # fig, axes = plt.subplots(2, N_waves)\n # colors = cm.Reds(np.linspace(0.5, 1, N_waves))\n # for j in range(N_waves):\n # ax1 = axes[0][j]\n # scy = sli_foc_xy[j, 1]\n # scx = sli_foc_xy[j, 0]\n # ax1.axhline(y=scy + 1.0 * box_size / 2, color='black', linestyle='--')\n # ax1.axhline(y=scy - 1.0 * box_size / 2, color='black', linestyle='--')\n # ax1.scatter(slicer_xy[j, :, 0], slicer_xy[j, :, 1], s=3, color=colors[j])\n # ax1.scatter(sli_foc_xy[j, 0], sli_foc_xy[j, 1], s=3, color='black')\n # wavelength = system.SystemData.Wavelengths.GetWavelength(wavelength_idx[j]).Wavelength\n # ax1.set_title(\"IMG SLI | %.3f $\\mu$m\" % wavelength)\n # ax1.set_aspect('equal')\n # ax1.get_yaxis().set_visible(False)\n # ax1.get_xaxis().set_visible(False)\n #\n # p = 1.2\n # ax1.set_xlim([scx - p * box_size / 2, scx + p * box_size / 2])\n # ax1.set_ylim([scy - p * box_size / 2, scy + p * box_size / 2])\n #\n # ax2 = axes[1][j]\n # dcx = det_foc_xy[j, 0]\n # dcy = det_foc_xy[j, 1]\n # ax2.scatter(detector_xy[j, :, 0], detector_xy[j, :, 1], s=3, color=colors[j])\n # ax2.scatter(det_foc_xy[j, 0], det_foc_xy[j, 1], s=3, color='black')\n # ax2.axvline(x=dcx + det_pix * box_size / 2, color='black', linestyle='--')\n # ax2.axvline(x=dcx - det_pix * box_size / 2, color='black', linestyle='--')\n # ax2.set_title(\"DET | %.3f $\\mu$m\" % wavelength)\n # ax2.set_aspect('equal')\n # ax2.get_yaxis().set_visible(False)\n # ax2.get_xaxis().set_visible(False)\n # ax2.set_xlim([dcx - p * det_pix * box_size / 2, dcx + p * det_pix * box_size / 2])\n # ax2.set_ylim([dcy - p * det_pix * box_size / 2, dcy + p * det_pix * box_size / 2])\n #\n #\n # plt.show()\n\n return EE, obj_xy, sli_foc_xy, det_foc_xy",
"def scaleHF_fluxspace(self, PFC, lqEich, S, P):\n # Get R and Z vectors at the midplane\n R_omp_sol = PFC.ep.g['lcfs'][:,0].max()\n R_omp_min = R_omp_sol - 5.0*lqEich*(1e-3) #in meters now\n R_omp_max = R_omp_sol + 20.0*lqEich*(1e-3) #in meters now\n #if R_omp_max is outside EFIT grid, cap at maximum R of grid\n if R_omp_max > max(PFC.ep.g['R']):\n R_omp_max = max(PFC.ep.g['R']) #in meters now\n R_omp = np.linspace(R_omp_min, R_omp_max, 1000)\n Z_omp = np.zeros(R_omp.shape)\n #Calculate flux at midplane using gfile\n psiN = PFC.ep.psiFunc.ev(R_omp,Z_omp)\n psi = psiN * (PFC.ep.g['psiSep']-PFC.ep.g['psiAxis']) + PFC.ep.g['psiAxis']\n PFC.psiMinLCFS = PFC.ep.psiFunc.ev(R_omp_sol,0.0)\n s_hat = psiN - PFC.psiMinLCFS\n # Evaluate B at outboard midplane\n Bp_omp = PFC.ep.BpFunc.ev(R_omp,Z_omp)\n Bt_omp = PFC.ep.BtFunc.ev(R_omp,Z_omp)\n B_omp = np.sqrt(Bp_omp**2 + Bt_omp**2)\n\n #Get q|| profile then integrate in Psi\n q_hat = self.eich_profile_fluxspace(PFC, lqEich, S, R_omp, Bp_omp, psiN)\n\n #Menard's method\n P0 = 2*np.pi * simps(q_hat, psi)\n #Matt's Method\n# P0 = 2*np.pi * simps(q_hat / B_omp, psi)\n #account for nonphysical power\n if P0 < 0: P0 = -P0\n #Scale to input power\n q0 = P/P0\n return q0",
"def apply_gaussian_resolution(self,params,data,fwhm=1,dE=0.01,E_max=100):\n print('\\n################### CONVOLUTION #####################\\n')\n print(f'\\n\\tConvolution with Gaussian function, FWHM = {fwhm} meV\\n')\n\n data.fwhm = fwhm\n c = fwhm/2.35482\n\n data.dE = dE\n data.E_max = E_max\n data.spectra_E = np.arange(0,data.E_max+data.dE,data.dE)\n data.spectra_num_E = len(data.spectra_E)\n data.spectra = np.zeros((data.spectra_num_E,params.num_Qpoints))\n data.smooth_spectra = np.zeros((data.spectra_num_E,params.num_Qpoints))\n structure_factors = []\n energies = []\n\n ### sum intensity of degenerate bands\n if params.sum_degenerate_bands == True:\n print('\\n\\tSumming degenerate bands before convolution (using convolution dE as tolerance)\\n')\n for q in range(params.num_Qpoints):\n sfac = data.structure_factors[:,q]\n energy = data.frequencies[f'{q}']\n reduced_energies = []\n summed_sfac = []\n while True:\n if len(energy) == 0:\n break\n test_energy = energy[0]\n reduced_energies.append(test_energy)\n indicies = np.intersect1d(np.argwhere(energy <= (test_energy+data.dE)),\n np.argwhere(energy > (test_energy-data.dE)))\n summed_sfac.append(sfac[indicies].sum())\n sfac = np.delete(sfac,indicies)\n energy = np.delete(energy,indicies)\n energies.append(reduced_energies)\n structure_factors.append(summed_sfac)\n else:\n print('\\n\\tWARNING: You should definitely sum degenerate bands!!!\\n')\n for q in range(params.num_Qpoints):\n energies.append(data.frequencies[f'{q}'])\n structure_factors.append(data.structure_factors[:,q])\n\n ### populate array for heatmap\n ### try statement takes care of negative energies\n for q in range(params.num_Qpoints):\n for b in range(len(structure_factors[q][:])):\n try: # if there are negative modes, argwhere returns an empty vector and the slice crashes\n data.spectra[np.argwhere(data.spectra_E <= \n energies[q][b]).max(),q] = structure_factors[q][b]\n except:\n continue\n\n if params.bose_factor == True:\n print('\\n\\tWARNING: Bose factor isnt verified. Need to compare to SNAXS.\\n')\n if params.temperature < 5:\n temperature = 5\n else:\n temperature = params.temperature\n inds = np.argwhere(data.spectra_E <= 0.5)\n tmp_e = np.copy(data.spectra_E)\n tmp_e[inds] = 0.5\n bose = 1+1/(np.exp(tmp_e/(constants.kb*1000*temperature))-1)\n bose = np.tile(bose.reshape((data.spectra_num_E,1)),reps=(1,params.num_Qpoints))\n data.spectra = np.multiply(data.spectra,bose)\n data.spectra = data.spectra/np.max(data.spectra)\n\n ### gaussian convolution using for loops, slow but very little memory utilization\n g_energy = np.append(data.spectra_E-data.spectra_E.max(),data.spectra_E[1:])\n gaussian = np.exp(-0.5*g_energy**2/c**2)/c/np.sqrt(2*np.pi)\n gaussian = np.tile(gaussian.reshape((gaussian.shape[0],1)),(1,data.num_Qpoints))\n tmp = np.append(data.spectra,data.spectra,axis=0)[1:,:]\n for e in range(data.spectra_num_E):\n if e%50 == 0:\n print(f'\\t------ {e}/{data.spectra_num_E} -------')\n data.smooth_spectra[e,:] = np.trapz(tmp*np.roll(gaussian,shift=e,axis=0),g_energy,axis=0)\n print('\\n\\tDone convolving!\\n')\n data.smooth_spectra = data.smooth_spectra/np.max(data.smooth_spectra)\n\n# if params.random_background == True:\n# data.smooth_spectra = data.smooth_spectra+(np.random.normal(0,1,\n# (data.smooth_spectra.shape[0],data.smooth_spectra.shape[1])))*0.001\n \n plt.imshow(data.smooth_spectra,origin='lower',aspect='auto',cmap='hot')\n plt.show()",
"def _scale_param(self, resid_us):\n return((resid_us**2).sum().sum() / self.dof)",
"def _scale_aeff(self, input_irf_file, config):\n\n # Reading the Aeff parameters\n self._aeff['Elow'] = input_irf_file['Effective area'].data['Energ_lo'][0].copy()\n self._aeff['Ehigh'] = input_irf_file['Effective area'].data['Energ_hi'][0].copy()\n self._aeff['ThetaLow'] = input_irf_file['Effective area'].data['Theta_lo'][0].copy()\n self._aeff['ThetaHi'] = input_irf_file['Effective area'].data['Theta_hi'][0].copy()\n self._aeff['Area'] = input_irf_file['Effective area'].data['EffArea'][0].transpose().copy()\n self._aeff['E'] = scipy.sqrt(self._aeff['Elow'] * self._aeff['Ehigh'])\n self._aeff['Theta'] = (self._aeff['ThetaLow'] + self._aeff['ThetaHi']) / 2.0\n \n # Creating the energy-theta mesh grid\n energy, theta = scipy.meshgrid(self._aeff['E'], self._aeff['Theta'], indexing='ij')\n\n # ----------------------------------\n # Scaling the Aeff energy dependence\n\n # Constant error function\n if config['energy_scaling']['err_func_type'] == \"constant\":\n self._aeff['Area_new'] = self._aeff['Area'] * config['energy_scaling']['constant']['scale']\n\n # Gradients error function\n elif config['energy_scaling']['err_func_type'] == \"gradient\":\n scaling_params = config['energy_scaling']['gradient']\n self._aeff['Area_new'] = self._aeff['Area'] * (\n 1 + scaling_params['scale'] * gradient(scipy.log10(energy),\n scipy.log10(scaling_params['range_min']),\n scipy.log10(scaling_params['range_max']))\n )\n \n # Step error function\n elif config['energy_scaling']['err_func_type'] == \"step\":\n scaling_params = config['energy_scaling']['step']\n break_points = list(zip(scipy.log10(scaling_params['transition_pos']),\n scaling_params['transition_widths']))\n self._aeff['Area_new'] = self._aeff['Area'] * (\n 1 + scaling_params['scale'] * step(scipy.log10(energy), break_points)\n )\n else:\n raise ValueError(\"Aeff energy scaling: unknown scaling function type '{:s}'\"\n .format(config['energy_scaling']['err_func_type']))\n # ----------------------------------\n\n # ------------------------------------------\n # Scaling the Aeff off-axis angle dependence\n\n # Constant error function\n if config['angular_scaling']['err_func_type'] == \"constant\":\n self._aeff['Area_new'] = self._aeff['Area_new'] * config['angular_scaling']['constant']['scale']\n\n # Gradients error function\n elif config['angular_scaling']['err_func_type'] == \"gradient\":\n scaling_params = config['angular_scaling']['gradient']\n self._aeff['Area_new'] = self._aeff['Area_new'] * (\n 1 + scaling_params['scale'] * gradient(theta,\n scaling_params['range_min'],\n scaling_params['range_max'])\n )\n\n # Step error function\n elif config['angular_scaling']['err_func_type'] == \"step\":\n scaling_params = config['angular_scaling']['step']\n break_points = list(zip(scaling_params['transition_pos'],\n scaling_params['transition_widths']))\n self._aeff['Area_new'] = self._aeff['Area_new'] * (\n 1 + scaling_params['scale'] * step(theta, break_points)\n )\n else:\n raise ValueError(\"Aeff angular scaling: unknown scaling function type '{:s}'\"\n .format(config['angular_scaling']['err_func_type']))\n # ------------------------------------------\n\n # Recording the scaled Aeff\n input_irf_file['Effective area'].data['EffArea'][0] = self._aeff['Area_new'].transpose()",
"def new_ssim(x_img, y_img, delta=1):\n lum, cont, strut = ssim(x_img, y_img)\n blur = blurriness_index(x_img, y_img)\n blur = pow(blur, delta)\n\n fin_score = lum*cont*strut*blur\n\n return fin_score",
"def reduce_resolution(wi, fi, fwhm0=0.55, sigma_floor=0.2):\n\n # all in AA\n w_lick_res = (4000., 4400., 4900., 5400., 6000.)\n lick_res = (11.5, 9.2, 8.4, 8.4, 9.8) # FWHM in AA\n\n w = np.asarray(wi)\n flux = np.atleast_2d(fi)\n\n # Linear interpolation of lick_res over w\n # numpy interp does constant instead of extrapolation\n # res = np.interp(w, w_lick_res, lick_res)\n\n # spline order: 1 linear, 2 quadratic, 3 cubic ...\n from scipy.interpolate import InterpolatedUnivariateSpline\n res = InterpolatedUnivariateSpline(w_lick_res, lick_res, k=1)(w)\n\n # Compute width from fwhm\n const = 2. * np.sqrt(2. * np.log(2)) # conversion fwhm --> sigma\n lick_sigma = np.sqrt((res ** 2 - fwhm0 ** 2)) / const\n\n # Convolution by g=1/sqrt(2*pi*sigma^2) * exp(-r^2/(2*sigma^2))\n flux_red = np.zeros(flux.shape, dtype=flux.dtype)\n\n for i, sigma in enumerate(lick_sigma):\n maxsigma = 3. * sigma\n # sampling floor: min (0.2, sigma * 0.1)\n delta = min(sigma_floor, sigma * 0.1)\n delta_wj = np.arange(-maxsigma, + maxsigma, delta)\n wj = delta_wj + w[i]\n for k, fk in enumerate(flux):\n fluxj = np.interp(wj, w, fk, left=0., right=0.)\n flux_red[k, i] = np.sum(fluxj * delta * np.exp(-0.5 * (delta_wj / sigma) ** 2))\n\n flux_red /= lick_sigma * const\n\n return flux_red.reshape(np.shape(fi))",
"def calculate_fwhm(self, surface, xy_data, PSF_window, N_points, spaxel_scale, wavelength, mode='diffraction'):\n\n start = time()\n # Calculate the Geometric PSF\n x, y = xy_data[:, 0], xy_data[:, 1]\n cent_x, cent_y = np.mean(x), np.mean(y)\n\n # Estimate the Geometric PSF using Kernel Density Estimation. The XY raytrace results are random samples\n # drawn from a probability distribution, the Geometric PSF. KDE estimates that distribution.\n # The main parameter of interest is the 'bandwidth' which defines the width of the kernel that KDE uses to\n # estimate the distribution. A narrower kernel will give a GeoPSF with finer structure; too wide a kernel will\n # just wash away the structure. We found that a bandwidth equal to the standard deviation of the raytrace data\n # works well\n std_x, std_y = np.std(x), np.std(y)\n bandwidth = min(std_x, std_y)\n kde = KernelDensity(kernel='gaussian', bandwidth=1.0*bandwidth).fit(xy_data)\n\n # define a grid to compute the PSF\n xmin, xmax = cent_x - PSF_window/2/1000, cent_x + PSF_window/2/1000\n ymin, ymax = cent_y - PSF_window/2/1000, cent_y + PSF_window/2/1000\n x_grid = np.linspace(xmin, xmax, N_points)\n y_grid = np.linspace(ymin, ymax, N_points)\n xx_grid, yy_grid = np.meshgrid(x_grid, y_grid)\n xy_grid = np.vstack([xx_grid.ravel(), yy_grid.ravel()]).T\n log_scores = kde.score_samples(xy_grid)\n\n psf_geo = np.exp(log_scores)\n psf_geo /= np.max(psf_geo)\n psf_geo = psf_geo.reshape(xx_grid.shape)\n\n time_geopsf = time() - start\n # print(\"Time to estimate GeoPSF: %.3f sec\" % time_geo)\n\n if mode == \"diffraction\":\n start = time()\n\n psf_diffr = diffraction.add_diffraction(surface=surface, psf_geo=psf_geo, PSF_window=PSF_window,\n scale_mas=spaxel_scale, wavelength=wavelength)\n time_diffpsf = time() - start\n # print(\"Time to add Diffraction: %.3f sec\" % time_diffpsf)\n\n # Fit the PSF to a 2D Gaussian\n start = time()\n guess_x = PSF_window / 2 / 1000\n fwhm_x, fwhm_y = diffraction.fit_psf_to_gaussian(xx=xx_grid, yy=yy_grid, psf_data=psf_diffr,\n x0=cent_x, y0=cent_y, sigmax0=guess_x, sigmay0=guess_x)\n psf_result = psf_diffr\n\n elif mode == \"geometric\":\n\n start = time()\n guess_x = PSF_window / 2 / 1000\n fwhm_x, fwhm_y = diffraction.fit_psf_to_gaussian(xx=xx_grid, yy=yy_grid, psf_data=psf_geo,\n x0=cent_x, y0=cent_y, sigmax0=guess_x, sigmay0=guess_x)\n psf_result = psf_geo\n\n # fig, (ax1, ax2) = plt.subplots(1, 2)\n # img1 = ax1.imshow(psf_geo, extent=[xmin, xmax, ymin, ymax], cmap='plasma', origin='lower')\n # ax1.scatter(x, y, s=1, color='white', alpha=0.5)\n # plt.colorbar(img1, ax=ax1, orientation='horizontal')\n # ax1.set_xlabel(r'X [mm]')\n # ax1.set_ylabel(r'Y [mm]')\n # ax1.set_title(r'Geometric PSF estimate | Surface: %s' % surface)\n #\n # ax2.plot(x_grid, psf_geo[N_points // 2])\n # xbins, bins, p = ax2.hist(x, bins=np.linspace(xmin, xmax, N_points), density=True)\n # for item in p:\n # item.set_height(item.get_height() / np.max(xbins))\n # ax2.set_ylim([0, 1])\n # plt.show()\n\n time_gauss = time() - start\n\n # print('FWHM time: %.3f sec for GeoPSF estimate:' % time_geopsf)\n # print('FWHM time: %.3f sec for DiffPSF convolution:' % time_diffpsf)\n # print('FWHM time: %.3f sec for Gaussian fit:' % time_gauss)\n\n #\n # img2 = ax2.imshow(psf_diffr, extent=[xmin, xmax, ymin, ymax], cmap='plasma', origin='lower')\n # plt.colorbar(img2, ax=ax2, orientation='horizontal')\n # ax2.set_xlabel(r'X [mm]')\n # ax2.set_ylabel(r'Y [mm]')\n # if surface == 'DET':\n # ax2.set_title(r'Diffr. 
PSF | %.3f microns | %.1f mas | FWHM_x: %.1f $\\mu$m' % (wavelength, spaxel_scale, fwhm_x))\n # elif surface == 'IS':\n # ax2.set_title(r'Diffr. PSF | %.3f microns | %.1f mas | FWHM_y: %.1f $\\mu$m' % (wavelength, spaxel_scale, fwhm_y))\n\n return fwhm_x, fwhm_y, psf_result",
"def _filter_scaling(reduction_indices, start_cell_num):\n filter_scaling = 1.0\n for ind in reduction_indices:\n if ind < start_cell_num:\n filter_scaling *= 2.0\n return filter_scaling",
"def spectrum_processing(s):\n s = default_filters(s)\n s = add_precursor_mz(s)\n s = normalize_intensities(s)\n s = reduce_to_number_of_peaks(s, n_required=5, ratio_desired=0.5, n_max=500)\n s = select_by_mz(s, mz_from=0, mz_to=1000)\n s = add_losses(s, loss_mz_from=10.0, loss_mz_to=200.0)\n s = require_minimum_number_of_peaks(s, n_required=5)\n return s",
"def _reduce_resolution(wi, fi, fwhm0=0.55, sigma_floor=0.2):\n\n # all in AA\n w_lick_res = (4000., 4400., 4900., 5400., 6000.)\n lick_res = (11.5, 9.2, 8.4, 8.4, 9.8) # FWHM in AA\n\n w = np.asarray(wi)\n flux = np.atleast_2d(fi)\n\n # Linear interpolation of lick_res over w\n # numpy interp does constant instead of extrapolation\n # res = np.interp(w, w_lick_res, lick_res)\n\n # spline order: 1 linear, 2 quadratic, 3 cubic ...\n from scipy.interpolate import InterpolatedUnivariateSpline\n res = InterpolatedUnivariateSpline(w_lick_res, lick_res, k=1)(w)\n\n # Compute width from fwhm\n const = 2. * np.sqrt(2. * np.log(2)) # conversion fwhm --> sigma\n lick_sigma = np.sqrt((res ** 2 - fwhm0 ** 2)) / const\n\n # Convolution by g=1/sqrt(2*pi*sigma^2) * exp(-r^2/(2*sigma^2))\n flux_red = np.zeros(flux.shape, dtype=flux.dtype)\n\n for i, sigma in enumerate(lick_sigma):\n maxsigma = 3. * sigma\n # sampling floor: min (0.2, sigma * 0.1)\n delta = min(sigma_floor, sigma * 0.1)\n delta_wj = np.arange(-maxsigma, + maxsigma, delta)\n wj = delta_wj + w[i]\n for k, fk in enumerate(flux):\n fluxj = np.interp(wj, w, fk, left=0., right=0.)\n flux_red[k, i] = np.sum(fluxj * delta *\n np.exp(-0.5 * (delta_wj / sigma) ** 2))\n\n flux_red /= lick_sigma * const\n\n return flux_red.reshape(np.shape(fi))",
"def newShape(self, sfs, scale='eigs'):\n if not isinstance(sfs, (list, tuple, np.ndarray)):\n raise TypeError('sfs is invalid type (expected array-like, found: {}'.format(type(sfs)))\n if len(sfs) != len(self.pc_stdevs):\n raise ValueError('sfs must be of the same length as the number of '\n 'principal components (expected {} but found {})'.format(len(self.pc_stdevs), len(sfs)))\n if scale == 'eigs':\n sf = (self.pca_U * sfs).sum(axis=1)\n elif scale == 'std':\n sf = (self.pca_U * self.pc_stdevs * sfs).sum(axis=1)\n else:\n raise ValueError(\"Invalid scale (expected 'eigs' or 'std' but found{}\".format(scale))\n return self.pca_mean + sf",
"def scaling_factor(self):\n bin_scale = self.spabins * self.spebins\n return bin_scale * self.int_time",
"def FindScale(self):\n\n ## 6 and from the cv code the distance is 6 then we are good\n print(\"TODO: Very hard\")",
"def rescale(self):\n # Get the L1 norm of data and scale correction for each fiber\n data_dims = self.data_dims\n if data_dims is ():\n tens_scale = self.data.abs()\n else:\n tens_scale = torch.sum(self.data.abs(), dim=data_dims, keepdim=True)\n log_shift = torch.floor(TARGET_SCALE(self.shape, data_dims) - \n torch.log2(tens_scale))\n\n # Keep the scale for zero fibers unchanged\n if torch.any(torch.isinf(log_shift)):\n log_shift = torch.where(torch.isfinite(log_shift), log_shift,\n torch.zeros_like(log_shift))\n\n return STensor(self.data*(2**log_shift), \n self.scale-log_shift)",
"def _set_psf_layout_psfex(self):\n\n print('setting psf layout for PSFEx')\n\n obj_data=self.obj_data\n psf_data=self.psf_data\n\n total_psf_pixels = 0\n\n #psf_npix = psf_size*psf_size\n\n psf_start_row = 0\n for iobj in range(obj_data.size):\n for icut in range(obj_data['ncutout'][iobj]):\n\n row = obj_data['orig_row'][iobj, icut]\n col = obj_data['orig_col'][iobj, icut]\n file_id = obj_data['file_id'][iobj,icut]\n\n p = psf_data[file_id]\n\n pim = p.get_rec(row,col)\n cen = p.get_center(row,col)\n\n psf_shape = pim.shape\n psf_npix = pim.size\n\n obj_data['psf_row_size'][iobj,icut] = psf_shape[0]\n obj_data['psf_col_size'][iobj,icut] = psf_shape[1]\n obj_data['psf_cutout_row'][iobj,icut] = cen[0]\n obj_data['psf_cutout_col'][iobj,icut] = cen[1]\n obj_data['psf_start_row'][iobj,icut] = psf_start_row\n\n psf_start_row += psf_npix\n total_psf_pixels += psf_npix\n\n\n self.total_psf_pixels = total_psf_pixels",
"def run_script():\n creator = EnergyScaleCreator(IMP, FONT_SIZE)\n creator.get_scaled_image(WIDTH_MIN)\n creator.extend_image()\n ticks = create_ticks(IMP, TICK_COUNT)\n creator.draw_scale(ticks)\n imp_extended = creator.image\n imp_extended.show()",
"def _square_and_smooth(sig, center_freq, fs):\n # Frequency dependent time constant\n if center_freq <= 1000:\n tau = 2 / (3 * center_freq)\n else:\n tau = 2 / (3 * 1000)\n # Squaring\n sig = sig ** 2\n # Three smoothing low-pass filters\n a1 = np.exp(-1 / (fs * tau))\n b0 = 1 - a1\n # zi = signal.lfilter_zi([b0], [1 -a1])\n for i in range(3):\n sig = signal.lfilter([b0], [1, -a1], sig)\n return sig",
"def ps(image):\n\timage = image.astype(float)\n\tps_img = abs(pow(fft2(image), 2))\n\treturn ps_img",
"def calc_psd2d(self):\n print(\"Calculating 2D power spectral density ... \", end=\"\", flush=True)\n rows, cols = self.shape\n imgf = np.fft.fftshift(np.fft.fft2(self.image))\n # NOTE: normalize w.r.t. image size\n norm = rows * cols * self.pixel[0]**2\n self.psd2d = (np.abs(imgf) ** 2) / norm\n print(\"DONE\", flush=True)\n return self.psd2d",
"def stdsize(image,r=30):\n image = square(image)\n s,_ = image.shape\n return interpolation.zoom(image,(r+0.5)/float(s))",
"def set_scaling(self, scaling):\n self.scaling = scaling\n self.eff_box_size = int(self.box_size*self.scaling+0.5)",
"def smoothSpectrum(f, X_f, r_oct):\n X_f_out = np.zeros(np.shape(X_f))\n for n in range(np.shape(f)[0]):\n # standard deviation\n sigma = f[n] / r_oct / np.pi\n # Gaussian window with the center frequnecy f[n] an dstandard deviation\n w = np.exp( -(f-f[n])**2 / (2*sigma**2) )\n w = w / np.sum(w, axis=0)\n X_f_out[n] = np.sum(w * X_f)\n \n return X_f_out",
"def plotBeamSize( self, Srange, eps, delP, plane = 'x', scaleXY = 1e2, save = 0 ):\n from Tools import sigm\n from VisualSpecs import myColors as colors \n from VisualSpecs import align_yaxis\n\n condition = (self.df.S > self.Smax - Srange) & (self.df.S <= self.Smax)\n slFr = self.df[condition]\n print('slected last', Srange, 'm upstream. Scale factor =', scaleXY)\n # init the plot and split x\n #\n fig = plt.figure( figsize = (20,10) ); ax = fig.add_subplot(111)\n twin = ax.twinx()\n\n # plot physical aperture\n #\n maxAper = self.df.APER.max()\n print('maximum aperture found:', maxAper)\n\n ax.plot( slFr.S, slFr.APER*scaleXY, lw = 3., color = colors[11] )\n ax.plot( slFr.S, -slFr.APER*scaleXY, lw = 3., color = colors[11] )\n ax.set_ylabel('aperture [cm]'); ax.set_ylim( -(maxAper+maxAper/10)*scaleXY, (maxAper+maxAper/10)*scaleXY )\n\n \n twin.set_ylabel('beam size $\\\\sigma$ [cm]')\n \n if plane == 'x':\n\n twin.plot( slFr.S, sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[2], label = '$\\\\sigma_x$' ) \n twin.plot( slFr.S, -sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[2] )\n\n twin.plot( slFr.S, 10*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[3], ls = '--', label = '10$\\\\sigma_x$') \n twin.plot( slFr.S, -10*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[3], ls = '--' ) # \n\n twin.plot( slFr.S, 20*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[4], ls = ':', label = '20$\\\\sigma_x$' ) \n twin.plot( slFr.S, -20*sigm(slFr.BETX, slFr.DX, eps, delP, scaleXY), color = colors[4], ls = ':' ) # \n align_yaxis(ax, 0, twin, 0); twin.set_ylim( -(maxAper+maxAper/10)*scaleXY, (maxAper+maxAper/10)*scaleXY ) \n\n plt.legend() \n plt.title('horizontal beam size and physical aperture')\n if save: print('saving fig ...'); plt.savefig( self.plotpath + 'physAprt_hrzt_beamSize100m.pdf', bbox_inches = 'tight', dpi = 70)\n \n else:\n\n twin.plot( slFr.S, sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[2], label = '$\\\\sigma_y$' ) \n twin.plot( slFr.S, -sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[2] )\n\n twin.plot( slFr.S, 10*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[3], ls = '--', label = '10$\\\\sigma_y$') \n twin.plot( slFr.S, -10*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[3], ls = '--' ) # \n\n twin.plot( slFr.S, 20*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[4], ls = ':', label = '20$\\\\sigma_y$' ) \n twin.plot( slFr.S, -20*sigm(slFr.BETY, slFr.DY, eps, delP, scaleXY), color = colors[4], ls = ':' ) # \n align_yaxis(ax, 0, twin, 0); twin.set_ylim( -(maxAper+maxAper/10)*scaleXY, (maxAper+maxAper/10)*scaleXY )\n\n plt.legend()\n plt.title('vertical beam size and physical aperture')\n if save: print('saving fig ...'); plt.savefig( self.plotpath + 'physAprt_vrt_beamSize100m.pdf', bbox_inches = 'tight', dpi = 70)\n\n return fig",
"def imageScale(scale):\n\t\treturn max(1, int(scale * (InterfaceTools.getCanvasSize()[0] / height)))",
"def spaxel_scale(scale=4, wave=1.0):\n\n scale_rad = scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wave * 1e-6)\n print(rho)",
"def calibrate(cap, location):\n\n #Poisition and size of sensor\n [x, y, h, w] = location\n\n #show square to user and wait for key\n print(\"please, step away to clear the blue square displayed on screen and press q to continue\")\n while True:\n ret, frame = cap.read()\n cv2.namedWindow('Calibrate',cv2.WINDOW_NORMAL)\n show = cv2.rectangle(frame, (x,y), (x+w,y+h), (255, 0, 0) , 5)\n cv2.imshow('Calibrate', show)\n key = cv2.waitKey(1)\n if key == ord('q'):\n break\n\n #get first image, process and define window previous for iteration\n ret, frame = cap.read()\n frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n frame = cv2.GaussianBlur(frame, (7,7), 0)\n previous = frame[y:y+w,x:x+h]\n\n #set parameters for mean value of sensor, kernel of erode function,\n sampleNbMean = 50\n xi = np.empty((0, sampleNbMean))\n kernel = np.ones((5,5), np.uint8)\n\n #iterate over each frame until sample number\n for iteration in range(sampleNbMean):\n\n # Capture frame, draw the window and display to the user\n ret, frame = cap.read()\n # Image operation\n frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n frame = cv2.GaussianBlur(frame, (7,7), 0)\n\n #get present window\n present = frame[y:y+w,x:x+h]\n\n #add sample for mean, add diference of window with prieviuos\n xi = np.append(xi,\n np.sum(\n cv2.erode(\n cv2.bitwise_xor(present,previous), kernel, iterations=1)))\n\n #present image becomes previous before steping into next image\n previous = present\n\n #mean\n mean = np.sum(xi)/len(xi)\n\n #sigma\n sum = 0\n for sample in xi:\n sum += np.power(sample - mean, 2)\n sigma = np.sqrt(sum/len(xi))\n\n #close window\n cv2.destroyWindow('Calibrate')\n\n return mean, sigma",
"def scale(image, maxval=1024):\n image += maxval # minimum value is now 0\n image /= maxval*2\n\n return(image)",
"def findScalingCoeffsMultiExp(self, PFC, lqCN, lqCF, lqPN, lqPF):\n # Get R and Z vectors at the midplane\n R_omp_sol = PFC.ep.g['lcfs'][:,0].max()\n R_omp_min = R_omp_sol - 5.0*(lqPN + lqPF) #already in m\n R_omp_max = R_omp_sol + 20.0*(lqCN + lqCF) #already in m\n #if R_omp_max is outside EFIT grid, cap at maximum R of grid\n if R_omp_max > max(PFC.ep.g['R']):\n R_omp_max = max(PFC.ep.g['R']) #in meters now\n R_omp = np.linspace(R_omp_min, R_omp_max, 1000)\n Z_omp = np.zeros(R_omp.shape)\n\n\n # Evaluate B at outboard midplane\n Bp_omp = PFC.ep.BpFunc.ev(R_omp,Z_omp)\n Bt_omp = PFC.ep.BtFunc.ev(R_omp,Z_omp)\n B_omp = np.sqrt(Bp_omp**2 + Bt_omp**2)\n\n #Find coordinate transformation vector at midplane\n psiaxis = PFC.ep.g['psiAxis']\n psiedge = PFC.ep.g['psiSep']\n deltaPsi = np.abs(psiedge - psiaxis)\n gradPsi = Bp_omp*R_omp\n xfm = gradPsi / deltaPsi\n # transform hf width into flux space\n lqPN_hat = lqPN*xfm\n lqPF_hat = lqPF*xfm\n lqCN_hat = lqCN*xfm\n lqCF_hat = lqCF*xfm\n\n #Calculate flux at midplane using gfile\n psiN = PFC.ep.psiFunc.ev(R_omp,Z_omp)\n psi = psiN*(psiedge - psiaxis) + psiaxis\n PFC.psiMinLCFS = PFC.ep.psiFunc.ev(R_omp_sol,0.0)\n s_hat = psiN - PFC.psiMinLCFS\n\n #find locations in Private vs Common flux regions\n useP = np.where(s_hat < 0.0)[0]\n useC = np.where(s_hat >= 0.0)[0]\n\n #integral for SOL in flux space\n if len(useP)>0:\n qPN_hat = np.exp( s_hat[useP] / lqPN_hat[useP])\n qPF_hat = np.exp( s_hat[useP] / lqPF_hat[useP])\n #reinke method\n #intPN = simps(qPN_hat / B_omp[useP], psi[useP])\n #intPF = simps(qPF_hat / B_omp[useP], psi[useP])\n #menard method\n intPN = simps(qPN_hat, psi[useP])\n intPF = simps(qPF_hat, psi[useP])\n else:\n qPN_hat = 0.0\n qPF_hat = 0.0\n intPN = 0.0\n intPF = 0.0\n\n if len(useC)>0:\n qCN_hat = np.exp(-s_hat[useC] / lqCN_hat[useC])\n qCF_hat = np.exp(-s_hat[useC] / lqCF_hat[useC])\n #reinke method\n #intCN = simps(qCN_hat / B_omp[useC], psi[useC])\n #intCF = simps(qCF_hat / B_omp[useC], psi[useC])\n #menard method\n intCN = simps(qCN_hat, psi[useC])\n intCF = simps(qCF_hat, psi[useC])\n else:\n qCN_hat = 0.0\n qCF_hat = 0.0\n intCN = 0.0\n intCF = 0.0\n\n P0 = 2*np.pi * (intCN*self.fracCN + intCF*self.fracCF +\n intPN*self.fracPN + intPF*self.fracPF)\n #account for nonphysical power\n if P0 < 0: P0 = -P0\n #Scale to input power\n q0 = self.Psol/P0\n\n #old method left for reference (same math)\n #q0 = (self.Psol/(2*np.pi)) / (intCN*self.fracCN + intCF*self.fracCF +\n # intPN*self.fracPN + intPF*self.fracPF)\n\n return q0",
"def fs2ps2D(px, s):\n\t\tsfun = psarclength(px)\t\n\t\treturn sfun-s",
"def scaling(self):\n return self.__scaling",
"def calc_psf_fwhm_inpix_gaussian(arr):\n\tmodel = fit_gaussian(arr)\n\n\tsigma = max(model.y_stddev, model.x_stddev)\n\tfwhm = 2.355 * sigma\n\n\treturn fwhm",
"def intensityRatio(self, wvlRange=None, wvlRanges=None, top=10):\n\n if not hasattr(self, 'Intensity'):\n try:\n self.intensity()\n except:\n print(' intensities not calculated and emiss() is unable to calculate them')\n print(' perhaps the temperature and/or eDensity are not set')\n return\n fontsize=14\n eDensity = self.EDensity\n temperature = self.Temperature\n ntemp = temperature.size\n if ntemp > 0:\n if temperature[0] == temperature[-1]:\n ntemp = 1\n ndens = eDensity.size\n if ndens > 0:\n if eDensity[0] == eDensity[-1]:\n ndens = 1\n print(' ndens = %5i ntemp = %5i'%(ndens, ntemp))\n\n ionS = self.Intensity['ionS']\n # see if we are dealing with more than a single ion\n ionSet = set(ionS)\n ionNum = len(ionSet)\n wvl = self.Intensity[\"wvl\"]\n # find which lines are in the wavelength range if it is set\n if wvlRange:\n igvl=util.between(wvl,wvlRange)\n if len(igvl) == 0:\n print('no lines in wavelength range %12.2f - %12.2f'%(wvlRange[0], wvlRange[1]))\n return\n elif wvlRanges:\n igvl = []\n for awvlRange in wvlRanges:\n igvl.extend(util.between(wvl,awvlRange))\n if len(igvl) == 0:\n print('no lines in wavelength ranges specified ')\n return\n else:\n igvl=range(len(wvl))\n nlines=len(igvl)\n igvl=np.take(igvl,wvl[igvl].argsort())\n if top > nlines:\n top=nlines\n intensity = self.Intensity['intensity']\n maxIntens = np.zeros(nlines,'Float64')\n for iline in range(nlines):\n maxIntens[iline] = intensity[:, igvl[iline]].max()\n for iline in range(nlines):\n if maxIntens[iline]==maxIntens.max():\n maxAll=intensity[:, igvl[iline]]\n igvlsort=np.take(igvl,np.argsort(maxIntens))\n topLines=igvlsort[-top:]\n maxWvl='%5.3f' % wvl[topLines[-1]]\n topLines=topLines[wvl[topLines].argsort()]\n print(' maxWvl = %s'%(maxWvl))\n\n # need to make sure there are no negative values before plotting\n good = intensity > 0.\n intensMin = intensity[good].min()\n bad = intensity <= 0.\n intensity[bad] = intensMin\n\n ylabel='Intensity relative to '+maxWvl\n if ionNum == 1:\n title=self.Spectroscopic\n else:\n title = ''\n\n if ndens==1 and ntemp==1:\n print(' only a single temperature and eDensity')\n return\n elif ndens == 1:\n xlbl='Temperature (K)'\n xvalues=self.Temperature\n outTemperature=self.Temperature\n outDensity = self.EDensity\n desc_str=' Density = %10.2e (cm$^{-3}$)' % self.EDensity[0]\n elif ntemp == 1:\n xvalues=self.EDensity\n outTemperature = self.Temperature\n outDensity=self.EDensity\n xlbl='Electron Density (cm$^{-3}$)'\n desc_str=' Temp = %10.2e (K)' % self.Temperature[0]\n else:\n outTemperature=self.Temperature\n outDensity=self.EDensity\n xlbl='Temperature (K)'\n xvalues=self.Temperature\n desc_str=' Variable Density'\n # put all actual plotting here\n plt.ion()\n # maxAll is an array\n ymax = np.max(intensity[:, topLines[0]]/maxAll)\n ymin = ymax\n plt.figure()\n ax = plt.subplot(111)\n nxvalues=len(xvalues)\n # reversing is necessary - otherwise, get a ymin=ymax and a matplotlib error\n for iline in range(top-1, -1, -1):\n tline=topLines[iline]\n plt.loglog(xvalues,intensity[:, tline]/maxAll)\n if np.min(intensity[:, tline]/maxAll) < ymin:\n ymin = np.min(intensity[:, tline]/maxAll)\n if np.max(intensity[:, tline]/maxAll) > ymax:\n ymax = np.max(intensity[:, tline]/maxAll)\n skip=2\n start=divmod(iline,nxvalues)[1]\n for ixvalue in range(start,nxvalues,nxvalues//skip):\n if ionNum == 1:\n text = '%10.4f'%(wvl[tline])\n else:\n text = '%s %10.4f'%(ionS[tline], wvl[tline])\n plt.text(xvalues[ixvalue], intensity[ixvalue, tline]/maxAll[ixvalue], text)\n if ndens == 1:\n 
print('%12.2e %12.2e '%(xvalues.min(),xvalues.max()))\n plt.xlim(xvalues.min(),xvalues.max())\n plt.xlabel(xlbl,fontsize=fontsize)\n plt.ylabel(ylabel,fontsize=fontsize)\n elif ntemp == 1:\n plt.text(0.07, 0.5,title, horizontalalignment='left', verticalalignment='center', fontsize=fontsize, transform = ax.transAxes)\n ax2 = plt.twiny()\n xlblDen=r'Electron Density (cm$^{-3}$)'\n plt.xlabel(xlblDen, fontsize=fontsize)\n plt.loglog(eDensity,intensity[:, topLines[top-1]]/maxAll, visible=False)\n ax2.xaxis.tick_top()\n plt.ylim(ymin/1.2, 1.2*ymax)\n else:\n plt.ylim(ymin/1.2, 1.2*ymax)\n plt.title(title+desc_str,fontsize=fontsize)\n plt.tight_layout()\n plt.draw()\n # need time to let matplotlib finish plotting\n time.sleep(0.5)\n\n # get line selection\n selectTags = []\n for itop in topLines:\n if ionNum == 1:\n selectTags.append(str(wvl[itop]))\n else:\n selectTags.append(ionS[itop]+ ' '+ str(wvl[itop]))\n numden = chGui.gui.choice2Dialog(selectTags)\n\n # num_idx and den_idx are tuples\n num_idx=numden.numIndex\n if len(num_idx) == 0:\n print(' no numerator lines were selected')\n return\n den_idx=numden.denIndex\n if len(den_idx) == 0:\n print(' no denominator lines were selected')\n return\n numIntens=np.zeros(len(xvalues),'Float64')\n for aline in num_idx:\n numIntens += intensity[:, topLines[aline]]\n\n denIntens = np.zeros(len(xvalues),'Float64')\n for aline in den_idx:\n denIntens += intensity[:, topLines[aline]]\n\n # plot the desired ratio\n # maxAll is an array\n plt.figure()\n ax = plt.subplot(111)\n plt.loglog(xvalues,numIntens/denIntens)\n plt.xlim(xvalues.min(),xvalues.max())\n plt.xlabel(xlbl,fontsize=fontsize)\n plt.ylabel('Ratio ('+self.Defaults['flux']+')',fontsize=fontsize)\n if ionNum == 1:\n desc = ionS[0]\n else:\n desc = ''\n for aline in num_idx:\n if ionNum == 1:\n desc += ' ' + str(wvl[topLines[aline]])\n else:\n desc += ' ' + ionS[topLines[aline]] + ' ' + str(wvl[topLines[aline]])\n desc += ' / '\n for aline in den_idx:\n if ionNum == 1:\n desc += ' ' + str(wvl[topLines[aline]])\n else:\n desc += ' ' + ionS[topLines[aline]] + ' ' + str(wvl[topLines[aline]])\n if ndens == ntemp and ntemp > 1:\n plt.text(0.07, 0.5,desc, horizontalalignment='left', verticalalignment='center', fontsize=fontsize, transform = ax.transAxes)\n #\n ax2 = plt.twiny()\n xlblDen=r'Electron Density (cm$^{-3}$)'\n plt.xlabel(xlblDen, fontsize=fontsize)\n plt.loglog(eDensity,numIntens/denIntens, visible=False)\n ax2.xaxis.tick_top()\n else:\n plt.title(desc,fontsize=fontsize)\n plt.tight_layout()\n\n cnt = desc.count(' ')\n intensityRatioFileName = desc.replace(' ', '_', cnt) + '.rat'\n intensityRatioFileName = intensityRatioFileName.lstrip('_').replace('_/_','-')\n self.IntensityRatio={'ratio':numIntens/denIntens,'desc':desc,\n 'temperature':outTemperature,'eDensity':outDensity,'filename':intensityRatioFileName, 'numIdx':num_idx, 'denIdx':den_idx}",
"def intensityPSF_Iron(N=1000):\n col_seq = [ ( 0/255., 0/255., 0/255.),\n ( 15/255., 0/255., 65/255.),\n ( 30/255., 0/255., 100/255.),\n ( 50/255., 0/255., 135/255.),\n ( 75/255., 0/255., 147/255.),\n (100/255., 0/255., 153/255.),\n (122/255., 0/255., 158/255.),\n (128/255., 0/255., 160/255.),\n (134/255., 0/255., 160/255.),\n (146/255., 0/255., 158/255.),\n (152/255., 0/255., 157/255.),\n (157/255., 0/255., 156/255.),\n (162/255., 0/255., 155/255.),\n (167/255., 0/255., 154/255.),\n (171/255., 0/255., 153/255.),\n (175/255., 2/255., 152/255.),\n (178/255., 4/255., 151/255.),\n (182/255., 6/255., 150/255.),\n (185/255., 8/255., 149/255.),\n (188/255., 10/255., 147/255.),\n (191/255., 12/255., 146/255.),\n (193/255., 14/255., 144/255.),\n (195/255., 16/255., 142/255.),\n (198/255., 18/255., 139/255.),\n (201/255., 20/255., 135/255.),\n (203/255., 22/255., 132/255.),\n (206/255., 24/255., 127/255.),\n (208/255., 26/255., 121/255.),\n (210/255., 29/255., 116/255.),\n (212/255., 33/255., 111/255.),\n (214/255., 37/255., 103/255.),\n (217/255., 41/255., 97/255.),\n (219/255., 46/255., 89/255.),\n (221/255., 49/255., 78/255.),\n (222/255., 53/255., 66/255.),\n (223/255., 54/255., 60/255.),\n (224/255., 56/255., 54/255.),\n (225/255., 58/255., 48/255.),\n (226/255., 60/255., 42/255.),\n (227/255., 62/255., 38/255.),\n (227/255., 63/255., 34/255.),\n (228/255., 64/255., 30/255.),\n (229/255., 68/255., 25/255.),\n (229/255., 70/255., 23/255.),\n (230/255., 72/255., 20/255.),\n (231/255., 74/255., 18/255.),\n (232/255., 76/255., 16/255.),\n (234/255., 78/255., 12/255.),\n (235/255., 82/255., 10/255.),\n (236/255., 86/255., 8/255.),\n (237/255., 90/255., 7/255.),\n (237/255., 92/255., 6/255.),\n (238/255., 94/255., 5/255.),\n (239/255., 96/255., 4/255.),\n (240/255., 100/255., 3/255.),\n (241/255., 103/255., 3/255.),\n (241/255., 106/255., 2/255.),\n (242/255., 109/255., 1/255.),\n (243/255., 113/255., 1/255.),\n (244/255., 116/255., 0/255.),\n (244/255., 120/255., 0/255.),\n (245/255., 125/255., 0/255.),\n (246/255., 129/255., 0/255.),\n (247/255., 133/255., 0/255.),\n (248/255., 136/255., 0/255.),\n (248/255., 139/255., 0/255.),\n (249/255., 142/255., 0/255.),\n (249/255., 145/255., 0/255.),\n (250/255., 149/255., 0/255.),\n (251/255., 154/255., 0/255.),\n (252/255., 159/255., 0/255.),\n (253/255., 163/255., 0/255.),\n (253/255., 168/255., 0/255.),\n (253/255., 172/255., 0/255.),\n (254/255., 176/255., 0/255.),\n (254/255., 179/255., 0/255.),\n (254/255., 184/255., 0/255.),\n (254/255., 187/255., 0/255.),\n (254/255., 191/255., 0/255.),\n (254/255., 195/255., 0/255.),\n (254/255., 199/255., 0/255.),\n (254/255., 202/255., 1/255.),\n (254/255., 205/255., 2/255.),\n (254/255., 208/255., 5/255.),\n (254/255., 210/255., 7/255.),\n (254/255., 212/255., 9/255.),\n (254/255., 216/255., 12/255.),\n (255/255., 219/255., 15/255.),\n (255/255., 221/255., 23/255.),\n (255/255., 223/255., 27/255.),\n (255/255., 225/255., 32/255.),\n (255/255., 227/255., 39/255.),\n (255/255., 229/255., 50/255.),\n (255/255., 232/255., 63/255.),\n (255/255., 235/255., 75/255.),\n (255/255., 238/255., 88/255.),\n (255/255., 239/255., 102/255.),\n (255/255., 241/255., 116/255.),\n (255/255., 242/255., 134/255.),\n (255/255., 243/255., 140/255.),\n (255/255., 244/255., 149/255.),\n (255/255., 244/255., 155/255.),\n (255/255., 245/255., 164/255.),\n (255/255., 246/255., 171/255.),\n (255/255., 247/255., 179/255.),\n (255/255., 247/255., 185/255.),\n (255/255., 248/255., 192/255.),\n (255/255., 248/255., 197/255.),\n (255/255., 
249/255., 203/255.),\n (255/255., 250/255., 209/255.),\n (255/255., 251/255., 216/255.),\n (255/255., 252/255., 222/255.),\n (255/255., 253/255., 228/255.),\n (255/255., 253/255., 234/255.),\n (255/255., 254/255., 239/255.),\n (255/255., 254/255., 245/255.),\n (255/255., 255/255., 249/255.),\n (255/255., 255/255., 250/255.),\n (255/255., 255/255., 251/255.),\n (255/255., 255/255., 252/255.),\n (255/255., 255/255., 253/255.),\n (255/255., 255/255., 254/255.),\n (255/255., 255/255., 255/255.)]\n\n seqLen = len(col_seq)\n delta = 1.0/(seqLen - 1)\n r_tuple = ((i*delta, col_seq[i][0], col_seq[i][0]) for i in range(seqLen))\n g_tuple = ((i*delta, col_seq[i][1], col_seq[i][1]) for i in range(seqLen))\n b_tuple = ((i*delta, col_seq[i][2], col_seq[i][2]) for i in range(seqLen))\n cdict = {'red': tuple(r_tuple),\n 'green': tuple(g_tuple),\n 'blue': tuple(b_tuple)}\n cwm = _mplb.colors.LinearSegmentedColormap('psfiron', cdict, N)\n return cwm",
"def scale(self):\n return self.distribution.scale",
"def get_scale_parameter(self):\r\n \r\n if self.scale_parameter == 0.0: \r\n shape_in_gamma_func = float(1+(1/self.shape_parameter))\r\n gamma_func = special.gamma(shape_in_gamma_func)\r\n self.scale_parameter = (self.mean_fire_recurrence/gamma_func)\r\n return self.scale_parameter\r\n else:\r\n return self.scale_parameter",
"def scale(self):\n return self.scale_factor / CONSTANTS.AU",
"def ex2d(image, ivar, psf, specrange, wavelengths, xyrange=None,\n full_output=False, regularize=0.0):\n\n #- Range of image to consider\n waverange = (wavelengths[0], wavelengths[-1])\n \n if xyrange is None:\n xmin, xmax, ymin, ymax = xyrange = psf.xyrange(specrange, waverange)\n image = image[ymin:ymax, xmin:xmax]\n ivar = ivar[ymin:ymax, xmin:xmax]\n else:\n xmin, xmax, ymin, ymax = xyrange\n\n nx, ny = xmax-xmin, ymax-ymin\n npix = nx*ny\n \n nspec = specrange[1] - specrange[0]\n nwave = len(wavelengths)\n \n #- Solve AT W pix = (AT W A) flux\n \n #- Projection matrix and inverse covariance\n A = psf.projection_matrix(specrange, wavelengths, xyrange)\n\n #- Pixel weights matrix\n w = ivar.ravel()\n W = spdiags(ivar.ravel(), 0, npix, npix)\n\n #-----\n #- Extend A with an optional regularization term to limit ringing.\n #- If any flux bins don't contribute to these pixels,\n #- also use this term to constrain those flux bins to 0.\n \n #- Original: exclude flux bins with 0 pixels contributing\n # ibad = (A.sum(axis=0).A == 0)[0]\n \n #- Identify fluxes with very low weights of pixels contributing \n fluxweight = W.dot(A).sum(axis=0).A[0]\n minweight = 0.01*np.max(fluxweight)\n ibad = fluxweight < minweight\n \n #- Add regularization of low weight fluxes\n I = regularize*scipy.sparse.identity(nspec*nwave)\n I.data[0,ibad] = minweight - fluxweight[ibad]\n \n #- Only need to extend A if regularization is non-zero\n if np.any(I.data):\n pix = np.concatenate( (image.ravel(), np.zeros(nspec*nwave)) )\n Ax = scipy.sparse.vstack( (A, I) )\n wx = np.concatenate( (w, np.ones(nspec*nwave)) )\n else:\n pix = image.ravel()\n Ax = A\n wx = w\n\n #- Inverse covariance\n Wx = spdiags(wx, 0, len(wx), len(wx))\n iCov = Ax.T.dot(Wx.dot(Ax))\n \n #- Solve (image = A flux) weighted by Wx:\n #- A^T W image = (A^T W A) flux = iCov flux \n y = Ax.T.dot(Wx.dot(pix))\n \n xflux = spsolve(iCov, y).reshape((nspec, nwave))\n\n #- Solve for Resolution matrix\n try:\n R, fluxivar = resolution_from_icov(iCov)\n except np.linalg.linalg.LinAlgError, err:\n outfile = 'LinAlgError_{}-{}_{}-{}.fits'.format(specrange[0], specrange[1], waverange[0], waverange[1])\n print \"ERROR: Linear Algebra didn't converge\"\n print \"Dumping {} for debugging\".format(outfile)\n import fitsio\n fitsio.write(outfile, image, clobber=True)\n fitsio.write(outfile, ivar, extname='IVAR')\n fitsio.write(outfile, A.data, extname='ADATA') \n fitsio.write(outfile, A.indices, extname='AINDICES')\n fitsio.write(outfile, A.indptr, extname='AINDPTR')\n fitsio.write(outfile, iCov.toarray(), extname='ICOV')\n raise err\n \n #- Convolve with Resolution matrix to decorrelate errors\n fluxivar = fluxivar.reshape((nspec, nwave))\n rflux = R.dot(xflux.ravel()).reshape(xflux.shape)\n\n if full_output:\n results = dict(flux=rflux, ivar=fluxivar, R=R, xflux=xflux, A=A)\n results['iCov'] = iCov\n return results\n else:\n return rflux, fluxivar, R",
"def rescale(ds,normalization):\n norm_constant = float(normalization)/ds.max()\n ds *= norm_constant\n info_string = \"Intensities were then multiplied by %f to give a maximum intensity of %f.\" % (norm_constant,float(normalization))\n ds.add_metadata(\"_pd_proc_info_data_reduction\",info_string,append=True)",
"def scale(self):\n return self._gev_bijector.scale",
"def calcSFP(gamma_tilt_deg, created_pal, SFP_config, Tech_res):\n # general\n gamma_n = gamma_tilt_deg / 180 * np.pi\n N = SFP_config.N\n \n ########################## SIMULATION SETUP ###############################\n # reference pressure\n p0 = 2 * 10**(-5)\n # frequencies\n f = get_freq_vec(N_freq=120, step_freq=1/12, freq_range=[20,20000])\n f_xy = np.array([100, 200, 400, 800, 1000, 2000, 5000, 10000, 16000])\n\n # initialize variables\n omega = 2 * np.pi * f\n omega_xy = 2 * np.pi * f_xy\n D_opt_LSA = np.ones([N, np.shape(f)[0]])\n P_LSA = np.zeros([np.shape(created_pal.xline)[0],np.shape(f)[0]], dtype=complex)\n\n # air attenuation\n alpha, c = AirAbsorptionCoefficient(f, T=293.15, p=101.325*10**(3), h=50)\n\n # directivity\n # if PALC_config.directivity not in ['Measured Loudspeaker Data']:\n # dire_meas_LSA = np.ones([np.shape(f)[0],np.shape(f)[0]])\n # dire_meas_deg = np.ones([np.shape(f)[0],np.shape(f)[0]])\n \n ######################### SPL CALCULATION #################################\n x_start, y_start, x_stop, y_stop, x_c_n, y_c_n, x_S, y_S = source_pos(gamma_n, SFP_config)\n\n for n in range(np.shape(f)[0]):\n G_LSA_vert = CalcGreenFunctions(created_pal.xline, created_pal.yline, np.array([0]), \\\n x_c_n, y_c_n, 0.82, SFP_config.directivity, \\\n SFP_config.Lambda_y, gamma_n, c, omega[n], 1, \\\n np.array(SFP_config.dir_meas[:,n]), \\\n np.array(SFP_config.dir_meas_deg[:,1]), \\\n alpha[n], f, n )\n\n P_LSA[:,n] = G_LSA_vert @ D_opt_LSA[:,n] # D_opt_LSA possibility to include driving functions\n p_SPL = 20 * np.log10(np.abs(P_LSA) / p0)\n Tech_res.update_tech_meas(p_SPL=p_SPL, f=f)\n return x_S, y_S",
"def test_smoothing():\n spec = IGRINSSpectrum(file=file)\n new_spec = spec.remove_outliers(threshold=3)\n\n assert len(new_spec.flux) > 0\n assert new_spec.shape[0] <= spec.shape[0]\n assert new_spec.shape[0] > 0\n assert new_spec.mask is not None",
"def convolve_psf(a, fwhm, edge='invert', replace_nan=True, debug=False):\n const2 = 2.354820046 # 2*sqrt(2*ln(2))\n const100 = 3.034854259 # sqrt(2*ln(100))\n sigma = fwhm / const2\n # gaussian drops to 1/100 of maximum value at x =\n # sqrt(2*ln(100))*sigma, so number of pixels to include from\n # centre of gaussian is:\n n = np.ceil(const100 * sigma)\n if replace_nan:\n a = nan2num(a, replace='interp')\n if debug:\n print(\"First and last {0} pixels of output will be invalid\".format(n))\n x = np.linspace(-n, n, 2*n + 1) # total no. of pixels = 2n+1\n gauss = np.exp(-0.5 * (x / sigma) ** 2 )\n\n return convolve_window(a, gauss, edge=edge)",
"def scaling(self):\n return self._scaling",
"def scaling(self):\n return self._scaling",
"def __init__(self, psf,\n wave_array=[487, 625, 770, 870, 1020, 1250, 1650, 2120],\n grid_shape=[11,11]):\n self.img_scale = 0.10 # arcseconds per pixel\n \n # Fix wave_array to be a float\n wave_array = np.array(wave_array, dtype=float)\n wave_shape = psf.shape[0]\n\n if wave_shape != len(wave_array):\n print 'Problem with PSF shape and wave_array shape'\n \n # Reshape the array to get the X and Y positions\n psf = psf.reshape((wave_shape, grid_shape[0], grid_shape[1],\n psf.shape[2], psf.shape[3]))\n psf = np.swapaxes(psf, 1, 2)\n\n # scale array = lambda / 2D (Nyquist sampled)\n tel_diam = 2.235 # meters\n psf_scale = wave_array * (206264.8 * 1e-9) / (2.0 * tel_diam) # arcsec / pixel\n \n # Calculate the positions of all these PSFs. We assume that the\n # outermost PSFs are at the corners such that all observed stars\n # are internal to these corners.\n x_pos = np.mgrid[0:grid_shape[0]]\n y_pos = np.mgrid[0:grid_shape[1]]\n\n # Need to multiply by some of the array size properties.\n # Note that this assumes a pixel scale.\n fov = 10. # arcmin\n fov *= 60. # arcsec\n fov /= self.img_scale # pixels\n\n x_pos *= fov / x_pos[-1]\n y_pos *= fov / y_pos[-1]\n\n \n self.psf = psf\n self.psf_x = x_pos\n self.psf_y = y_pos\n self.psf_wave = wave_array\n self.wave_shape = wave_shape\n self.grid_shape = grid_shape\n self.psf_scale = psf_scale\n\n return",
"def peak_PSF(self):\n im, strehl = self.compute_PSF(np.zeros(self.N_act))\n return strehl",
"def invmelscale(b):\n return 700.0 * (np.exp(b / 1125.0) - 1)",
"def scaling(sx,sy,Mat):\r\n # SM is the Scaling Matrix ( 3 X 3 )\r\n SM = [[sx,0,0],[0,sy,0],[0,0,1]]\r\n Scaled = Multiply(SM,Mat)\r\n # Scaled[0][0] is the updated x coordinate\r\n # Scaled[1][0] is the updated y coordinate\r\n return Scaled[0][0],Scaled[1][0],Scaled[2][0]",
"def update(self, es, **kwargs):\n es.sigma = self.coefficient * es.sp.weights.mueff * _norm(es.mean) / es.N / es.sp.cmean",
"def spectral_spread(sign, fs):\n f, ff = plotfft(sign, fs)\n spect_centr = spectral_centroid(sign, fs)\n if not np.sum(ff):\n return 0\n else:\n return np.dot(((f-spect_centr)**2), (ff / np.sum(ff)))",
"def scale(self,id,x,y,s):\n if id not in self.elements.keys():\n print(\"Id input not registered! Please check your process\")\n return False\n element=self.elements[id]\n state=element.scale(self.h-1-y,x,s,self.w,self.h)\n if state==True:\n self.canvas=np.ones((self.h,self.w,3),dtype=np.uint8)*255\n self.sync=False\n return state",
"def _ssim_for_multiscale(img1, img2, max_val=255, filter_size=11,\n filter_sigma=1.5, k1=0.01, k2=0.03):\n\n _, height, width, _ = img1.shape\n\n # Filter size can't be larger than height or width of images.\n #size = tf.min(filter_size, height, width)\n size = filter_size\n\n # Scale down sigma if a smaller filter size is used.\n sigma = size * filter_sigma / filter_size if filter_size else 0\n\n if filter_size:\n window = broadcast_to(tf.reshape(_f_special_gauss(size, sigma),\n (size, size, 1, 1)), (size, size, 3,1))\n mu1 = conv(img1, window)\n mu2 = conv(img2, window)\n sigma11 = conv(img1 * img1, window)\n sigma22 = conv(img2 * img2, window)\n sigma12 = conv(img1 * img2, window)\n else:\n # Empty blur kernel so no need to convolve.\n mu1, mu2 = img1, img2\n sigma11 = img1 * img1\n sigma22 = img2 * img2\n sigma12 = img1 * img2\n\n mu11 = mu1 * mu1\n mu22 = mu2 * mu2\n mu12 = mu1 * mu2\n sigma11 -= mu11\n sigma22 -= mu22\n sigma12 -= mu12\n\n # Calculate intermediate values used by both ssim and cs_map.\n c1 = (k1 * max_val) ** 2\n c2 = (k2 * max_val) ** 2\n v1 = 2.0 * sigma12 + c2\n v2 = sigma11 + sigma22 + c2\n ssim = tf.reduce_mean((((2.0 * mu12 + c1) * v1) / ((mu11 + mu22 + c1) * v2)))\n cs = tf.reduce_mean(v1 / v2)\n return ssim, cs",
"def update(self, new_scale=None):\n if not self._enabled:\n return\n\n _scale, _growth_tracker = self._check_scale_growth_tracker(\"update\")\n\n if new_scale is not None:\n # Accept a new user-defined scale.\n if isinstance(new_scale, float):\n self._scale.fill_(new_scale) # type: ignore[union-attr]\n else:\n reason = \"new_scale should be a float or a 1-element torch.cuda.FloatTensor with requires_grad=False.\"\n assert isinstance(new_scale, torch.cuda.FloatTensor), reason # type: ignore[attr-defined]\n assert new_scale.numel() == 1, reason\n assert new_scale.requires_grad is False, reason\n self._scale.copy_(new_scale) # type: ignore[union-attr]\n else:\n # Consume shared inf/nan data collected from optimizers to update the scale.\n # If all found_inf tensors are on the same device as self._scale, this operation is asynchronous.\n found_infs = [\n found_inf.to(device=_scale.device, non_blocking=True)\n for state in self._per_optimizer_states.values()\n for found_inf in state[\"found_inf_per_device\"].values()\n ]\n\n assert len(found_infs) > 0, \"No inf checks were recorded prior to update.\"\n\n found_inf_combined = found_infs[0]\n\n # Update across all model parallel instances.\n torch.distributed.all_reduce(\n found_inf_combined, op=torch.distributed.ReduceOp.MAX, group=parallel_state.get_model_parallel_group()\n )\n\n if len(found_infs) > 1:\n for i in range(1, len(found_infs)):\n found_inf = found_infs[i]\n # Update across all model parallel instances.\n torch.distributed.all_reduce(\n found_inf, op=torch.distributed.ReduceOp.MAX, group=parallel_state.get_model_parallel_group()\n )\n found_inf_combined += found_inf\n\n if found_inf_combined > 0:\n self._hysteresis_tracker -= 1\n if self._hysteresis_tracker <= 0:\n # When hysteresis becomes zero, follow the native grad scale update rule.\n # Increase scale and reset growth tracker\n torch._amp_update_scale_(\n _scale,\n _growth_tracker,\n found_inf_combined,\n self._growth_factor,\n self._backoff_factor,\n self._growth_interval,\n )\n else:\n # Only reset the growth tracker when hysteresis is larger than zero\n _growth_tracker.fill_(0.0)\n else:\n # When no inf found, follow the native grad scale update rule.\n # Increment growth_tracker, update scale when growth tracker reaches the interval, and\n # reset the hysteresis tracker.\n torch._amp_update_scale_(\n _scale,\n _growth_tracker,\n found_inf_combined,\n self._growth_factor,\n self._backoff_factor,\n self._growth_interval,\n )\n self._hysteresis_tracker = self.hysteresis\n\n # To prepare for next iteration, clear the data collected from optimizers this iteration.\n self._per_optimizer_states = defaultdict(torch.cuda.amp.grad_scaler._refresh_per_optimizer_state)",
"def get_scale_parameter(self):\n\n if self.scale_parameter == 0.0:\n shape_in_gamma_func = float(1+(1/self.shape_parameter))\n gamma_func = special.gamma(shape_in_gamma_func)\n self.scale_parameter = (self.mean_fire_recurrence/gamma_func)\n return self.scale_parameter\n else:\n return self.scale_parameter",
"def scale(self, sx : float, sy : float, sz : float):\n answ = self.clone()\n for i in range(len(self._elements)):\n answ._elements[i]._element = self._elements[i].element.scale(sx, sy, sz)\n\n return answ",
"def rescale_intensity_v2(im, low, high):\n\n from skimage.exposure import rescale_intensity, adjust_gamma\n if low > high:\n im_out = rescale_intensity(low-im.astype(np.float), (0, low-high), (0, 255)).astype(np.uint8)\n else:\n im_out = rescale_intensity(im.astype(np.float), (low, high), (0, 255)).astype(np.uint8)\n return im_out",
"def scale(self):\n return self._scale",
"def objective_epsf(params, self, psf_xy, sci, ivar, xp, yp, extended_data, ret):\n dx = xp-params[1]\n dy = yp-params[2]\n\n ddx = xp-xp.min()\n ddy = yp-yp.min()\n\n ddx = ddx/ddx.max()\n ddy = ddy/ddy.max()\n \n psf_offset = self.eval_ePSF(psf_xy, dx, dy, extended_data=extended_data)*params[0] + params[3] + params[4]*ddx + params[5]*ddy + params[6]*ddx*ddy\n \n if ret == 'resid':\n return (sci-psf_offset)*np.sqrt(ivar)\n \n chi2 = np.sum((sci-psf_offset)**2*(ivar))\n #print(params, chi2)\n return chi2",
"def analysis_function_fwhm_psf(self, system, wavelength_idx, surface, config, px, py, spaxel_scale, N_points, mode):\n\n start0 = time()\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # First of all, we need to find the Surface Number for the IMAGE SLICER \"Image Plane\"\n N_surfaces = system.LDE.NumberOfSurfaces\n surface_names = {} # A dictionary of surface number -> surface comment\n for k in np.arange(1, N_surfaces):\n surface_names[k] = system.LDE.GetSurfaceAt(k).Comment\n # find the Slicer surface number\n try:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('Image Plane')]\n except ValueError:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('Image plane')]\n slicer_surface = slicer_num\n\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n N_fields = sysField.NumberOfFields\n N_waves = len(wavelength_idx)\n\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(N_fields)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(N_fields)])\n\n # Use the Field Point at the centre of the Slice\n fx, fy = sysField.GetField(2).X, sysField.GetField(2).Y\n hx, hy = fx / X_MAX, fy / Y_MAX # Normalized field coordinates (hx, hy)\n obj_xy = np.array([fx, fy])\n\n N_pupil = px.shape[0] # Number of rays in the Pupil for a given field point and wavelength\n N_rays = N_waves * N_pupil\n\n FWHM = np.zeros((N_waves, 2))\n foc_xy = np.zeros((N_waves, 2))\n\n slicer_xy = np.empty((N_waves, N_pupil, 2))\n slicer_xy[:] = np.nan\n detector_xy = np.empty((N_waves, N_pupil, 2))\n detector_xy[:] = np.nan\n\n # (1) Run the raytrace up to the IMAGE SLICER\n raytrace = system.Tools.OpenBatchRayTrace()\n # remember to specify the surface to which you are tracing!\n rays_slicer = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, slicer_surface)\n\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n for (p_x, p_y) in zip(px, py): # Add the ray to the RayTrace\n rays_slicer.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n rays_slicer.StartReadingResults()\n checksum_slicer = 0\n for k in range(N_rays): # Get Raytrace results at the Image Slicer\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n # print(i_wave, j_pupil)\n output = rays_slicer.ReadNextResult()\n # Note that we ignore the vignetting\n if output[2] == 0:\n slicer_xy[i_wave, j_pupil, 0] = output[4]\n slicer_xy[i_wave, j_pupil, 1] = output[5]\n checksum_slicer += 1\n if checksum_slicer < N_rays:\n # this is now kind of pointless since we directly ignore the vignetting\n raise ValueError('Some rays were lost before the Image Slicer')\n\n rays_slicer.ClearData()\n\n # (2) Run the raytrace up to the DETECTOR\n # For speed, we re-use the same Raytrace, just define new rays!\n # raytrace_det = system.Tools.OpenBatchRayTrace()\n # Detector is always the last surface\n detector_surface = system.LDE.NumberOfSurfaces - 1\n rays_detector = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, detector_surface)\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n for (p_x, p_y) in zip(px, py):\n rays_detector.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n\n rays_detector.StartReadingResults()\n checksum_detector = 0\n index_valid_detector = np.empty((N_waves, N_pupil))\n 
index_valid_detector[:] = np.nan\n for k in range(N_rays): # Get Raytrace results at the Detector\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n output = rays_detector.ReadNextResult()\n if output[2] == 0: # ErrorCode & VignetteCode\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n #\n # elif output[2] == 0 and output[3] != 0:\n # # Some rays are vignetted\n # code = output[3]\n # vignetting_surface = system.LDE.GetSurfaceAt(code).Comment\n # # print(\"Config #%d | Wave #%d || Vignetting at surface #%d: %s\" % (config, wavelength_idx[i_wave], code, vignetting_surface))\n\n rays_detector.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n\n # time_rays = time() - start0\n # print(\"Time for Raytrace Slicer and Detector: %.3f sec\" % time_rays)\n\n # fig, axes = plt.subplots(2, N_waves)\n # colors = cm.Reds(np.linspace(0.5, 1, N_waves))\n # for j in range(N_waves):\n # ax1 = axes[0][j]\n # ax1.scatter(slicer_xy[j, :, 0], slicer_xy[j, :, 1], s=3, color=colors[j])\n # # scx =\n # # ax1.scatter(sli_foc_xy[j, 0], sli_foc_xy[j, 1], s=3, color='black')\n # # scy = sli_foc_xy[j, 1]\n # # ax1.axhline(y=scy + 1.0 * box_size / 2, color='black', linestyle='--')\n # # ax1.axhline(y=scy - 1.0 * box_size / 2, color='black', linestyle='--')\n # wavelength = system.SystemData.Wavelengths.GetWavelength(wavelength_idx[j]).Wavelength\n # ax1.set_title(\"%.3f $\\mu$m\" % wavelength)\n #\n # ax2 = axes[1][j]\n # # dcx = det_foc_xy[j, 0]\n # ax2.scatter(detector_xy[j, :, 0], detector_xy[j, :, 1], s=3, color=colors[j])\n # # ax2.scatter(det_foc_xy[j, 0], det_foc_xy[j, 1], s=3, color='black')\n # # ax2.axvline(x=dcx + det_pix * box_size / 2, color='black', linestyle='--')\n # # ax2.axvline(x=dcx - det_pix * box_size / 2, color='black', linestyle='--')\n #\n\n # FWHM\n\n # Plate scales [Px, Py] for each spaxel scale in mm / arcsec, depending on the surface\n plate_scales = {'IS': {4.0: [125, 250], 60.0: [16.67, 16.67]},\n 'DET': {4.0: [3.75, 7.5], 60.0: [0.5, 0.5]}}\n\n # Hard-coded values for the PSF windows in microns\n windows = {4.0: [5000, 200], 60.0: [500, 50]}\n win_slicer = windows[spaxel_scale][0]\n win_detect = windows[spaxel_scale][1]\n if config == 1:\n print(\"Sampling the Image Slicer plane with %d points: %.3f microns / point\" % (N_points, (win_slicer / N_points)))\n print(\"Sampling the Detector plane with %d points: %.3f microns / point\" % (N_points, (win_detect / N_points)))\n\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n # Calculate the FWHM at the Image Slicer\n xy_data_slicer = slicer_xy[i_wave]\n wavelength = system.SystemData.Wavelengths.GetWavelength(wave_idx).Wavelength\n fwhm_x_slicer, fwhm_y_slicer, psf_slicer = self.calculate_fwhm(surface='IS', xy_data=xy_data_slicer, PSF_window=win_slicer,\n N_points=N_points, spaxel_scale=spaxel_scale,\n wavelength=wavelength, mode=mode)\n\n # Calculate the FWHM at the Detector\n xy_data_detector = detector_xy[i_wave]\n fwhm_x_det, fwhm_y_det, psf_det = self.calculate_fwhm(surface='DET', xy_data=xy_data_detector, PSF_window=win_detect,\n N_points=N_points, spaxel_scale=spaxel_scale,\n wavelength=wavelength, mode=mode)\n\n # This is just in case Fraser needs the actual GeoPSF arrays in the future, not just the FWHM values\n # if mode == 'geometric':\n # # save the Geometric PSF at the detector\n # scale_str = '60x30' if spaxel_scale == 60.0 else '4x4'\n # for ifu in ['AB', 'CD', 'EF', 'GH']:\n # file_suffix = 
'geo_psf_%s_IFU-%s_%d_nm_config%s' % (scale_str, ifu, 1000 * wavelength, config)\n # # check if the IFU already exists\n # try:\n # _test = np.load(file_suffix + '.npy')\n # # already exists, so it must be the next one\n # except FileNotFoundError:\n # np.save(file_suffix, psf_det)\n # # break, otherwise we save the same result 4x under all IFU names\n # break\n\n # plt.show()\n\n foc_xy[i_wave] = [np.mean(xy_data_detector[:, 0]), np.mean(xy_data_detector[:, 1])]\n\n FWHM[i_wave] = [fwhm_x_det, fwhm_y_slicer]\n\n if config == 1:\n # Print the results as we go so that we get an idea\n # transform to milli-arcseconds\n platex = plate_scales['DET'][spaxel_scale][0]\n platey = plate_scales['IS'][spaxel_scale][1]\n fwhm_x_det_mas = fwhm_x_det / platex\n fwhm_y_slicer_mas = fwhm_y_slicer / platey\n\n print(\"%.3f microns\" % wavelength)\n print(\"FWHM in X [Detector]: %.1f microns (%.1f mas) | \"\n \"in Y [Image Slicer]: %.2f microns (%.1f mas)\" % (fwhm_x_det, fwhm_x_det_mas, fwhm_y_slicer, fwhm_y_slicer_mas))\n\n return FWHM, obj_xy, foc_xy",
"def snr_f(self, image):\n image_ps = self.pow_spec(image)\n noise_level = numpy.sum(self.rim*image_ps)/numpy.sum(self.rim)\n return numpy.sqrt(image_ps[int(self.size/2), int(self.size/2)]/noise_level)",
"def get_scaled_image(self, min_width=1024):\n self.dispersion = self.image.getCalibration().pixelWidth\n self.offset = self.image.getCalibration().xOrigin\n binning = 1\n while binning * min_width < self.image.getWidth():\n binning *= 2\n if binning > 1:\n binning /= 2\n IJ.run(self.image, 'Select None', '')\n new_image = self.image.crop()\n IJ.run(new_image, 'Bin...', 'x=%d y=%d binning=Average' %\n (binning, binning))\n self.dispersion *= binning\n self.offset /= binning\n self.image = new_image\n return new_image",
"def rescale_intensity(image, thres=(1.0, 99.0)):\n val_l, val_h = np.percentile(image, thres)\n image2 = image\n image2[image < val_l] = val_l\n image2[image > val_h] = val_h\n image2 = (image2.astype(np.float32) - val_l) / (val_h - val_l)\n return image2",
"def calc_scaled_waveform(\n waveform: np.ndarray,\n lin_factor: np.ndarray,\n pow2_factor: np.ndarray,\n logger=empty_logger()\n) -> np.ndarray:\n\n logger.info(\"scaling waveform\")\n\n return (\n 10e-9\n * (2 ** pow2_factor)\n * lin_factor\n * waveform.swapaxes(0, 1)\n ).swapaxes(0, 1)",
"def plot_psf_scale_map(self, vmin=-0.5, vmax=0.5):\n\n scale_map = self.get_psf_scale_map()\n\n pyplot.title(\"PSF $\\sigma_1$ scale map\")\n pyplot.semilogx()\n\n pyplot.xlabel('Energy, TeV')\n pyplot.ylabel('Off-center angle, deg')\n pyplot.pcolormesh(scale_map['E_edges'], scale_map['Theta_edges'], scale_map['sigma_1'].transpose(),\n cmap='seismic', vmin=vmin, vmax=vmax)\n pyplot.colorbar()",
"def initS0(self, img, size):\n\n I0 = cv.resize(img, (size[1], size[0]),\n interpolation=cv.INTER_LINEAR) # Bilinear interpolated upsampled image\n\n amplitude = np.sqrt(I0)\n\n FI0 = fft2(ifftshift(amplitude))\n FI0 = fftshift(FI0) # FI0.shape[0]\n S = np.array(FI0, dtype=np.complex64)\n return S",
"def test_epsf_build(self):\n\n size = 25\n oversampling = 4.\n stars = extract_stars(self.nddata, self.init_stars, size=size)\n epsf_builder = EPSFBuilder(oversampling=oversampling, maxiters=20,\n progress_bar=False)\n epsf, fitted_stars = epsf_builder(stars)\n\n ref_size = (size * oversampling) + 1\n assert epsf.data.shape == (ref_size, ref_size)\n\n y0 = int((ref_size - 1) / 2)\n z = epsf.data[y0, :]\n ampl, peak, sigma = gaussian1d_moments(z)\n assert_allclose(ampl, 0.002487, rtol=1e-4)\n assert_allclose(peak, y0, rtol=1e-3)\n assert_allclose(sigma, oversampling * self.stddev, rtol=1e-5)",
"def testScaleZeroPoint(self):\n\n ZEROPOINT = 27\n self.sctrl = afwMath.StatisticsControl()\n self.sctrl.setNanSafe(True)\n\n config = SpatialScaleZeroPointTask.ConfigClass()\n config.zeroPoint = ZEROPOINT\n config.interpStyle = \"CONSTANT\"\n config.selectFluxMag0.retarget(SelectLsstSimFluxMag0Task)\n config.selectFluxMag0.database = \"test_select_lsst_images\"\n zpScaler = SpatialScaleZeroPointTask(config=config)\n\n \"\"\" Note: this order does not properly retarget\n zpScaler = ScaleZeroPointTask()\n zpScaler.config.doInterpScale = True\n zpScaler.config.zeroPoint = ZEROPOINT\n zpScaler.config.interpStyle = \"CONSTANT\"\n zpScaler.config.selectFluxMag0.retarget(SelectLsstSimFluxMag0Task)\n zpScaler.config.selectFluxMag0.database = \"test_select_lsst_images\"\n \"\"\"\n\n outCalib = zpScaler.getCalib()\n self.assertAlmostEqual(outCalib.getMagnitude(1.0), ZEROPOINT)\n\n exposure = self.makeTestExposure(10, 10)\n # create dataId for exposure. Visit is only field needed. Others ignored.\n exposureId = {'ignore_fake_key': 1234, 'visit': 882820621}\n\n # API for computImageScale() takes a dataRef not a dataId.\n exposureFakeDataRef = WrapDataId(exposureId)\n # test methods: computeImageScale(), scaleMaskedImage(), getInterpImage()\n imageScaler = zpScaler.computeImageScaler(exposure, exposureFakeDataRef)\n scaleFactorIm = imageScaler.getInterpImage(exposure.getBBox())\n predScale = np.mean(imageScaler._scaleList) # 0.011125492863357\n\n self.assertAlmostEqual(afwMath.makeStatistics(scaleFactorIm, afwMath.VARIANCE, self.sctrl).getValue(),\n 0.0)\n self.assertAlmostEqual(afwMath.makeStatistics(scaleFactorIm, afwMath.MEAN, self.sctrl).getValue(),\n predScale)\n\n mi = exposure.getMaskedImage()\n imageScaler.scaleMaskedImage(mi)\n self.assertAlmostEqual(mi.get(1, 1)[0], predScale) # check image plane scaled\n self.assertAlmostEqual(mi.get(1, 1)[2], predScale**2) # check variance plane scaled\n\n exposure.setCalib(zpScaler.getCalib())\n self.assertAlmostEqual(exposure.getCalib().getFlux(ZEROPOINT), 1.0)",
"def _get_scaling(root):\n dpi = root.winfo_fpixels(\"1i\")\n scaling = dpi / 72.0\n logger.debug(\"dpi: %s, scaling: %s'\", dpi, scaling)\n return scaling",
"def des_psf_image(exptime=100,mag=None,seeing=[0.7,0.,0.],setbkg=True,moffat=False):\n gain = 0.21 # convert electrons to ADU\n npix = 40\n zeropoint = 26.794176 # r band, from Nikolay\n objectphoton = exptime*10**(0.4*(zeropoint - mag))\n if setbkg == False:\n skyphoton = 0.\n else:\n skyphoton = 8.460140*exptime #(sky level per pix per sec)\n bkg = skyphoton*gain # background in ADU\n if moffat == True:\n psf = moffat_psf(npix = npix,fwhm=seeing[0],beta=3.5,scale=0.27)\n else:\n psf = gauss_seeing(npix,seeing[0],seeing[1],seeing[2],scale = 0.27)\n img = (psf * objectphoton + skyphoton)*gain\n img = img + add_imageNoise(img)\n return img,bkg,psf",
"def test_fluxnorm():\n import time\n t1 = time.time()\n\n # define values\n im_lin_scale = 5 # make an image with this linear scale\n im_fill_value = 3. # fill it with this number\n im_scale = 1.3\n\n # First, make some Image with some total flux value (sum of pixel values) and scale\n im = galsim.ImageF(im_lin_scale, im_lin_scale)\n im.fill(im_fill_value)\n im.setScale(im_scale)\n total_flux = im_fill_value*(im_lin_scale**2)\n np.testing.assert_equal(total_flux, im.array.sum(),\n err_msg='Created array with wrong total flux')\n\n # Check that if we make an InterpolatedImage with flux normalization, it keeps that flux\n interp = galsim.InterpolatedImage(im) # note, flux normalization is the default\n np.testing.assert_equal(total_flux, interp.getFlux(),\n err_msg='Did not keep flux normalization')\n # Check that this is preserved when drawing\n im2 = interp.draw(dx = im_scale)\n np.testing.assert_equal(total_flux, im2.array.sum(),\n err_msg='Drawn image does not have expected flux normalization')\n\n # Now make an InterpolatedImage but tell it sb normalization\n interp_sb = galsim.InterpolatedImage(im, normalization = 'sb')\n # Check that when drawing, the sum is equal to what we expect if the original image had been\n # surface brightness\n im3 = interp_sb.draw(dx = im_scale)\n np.testing.assert_almost_equal(total_flux*(im_scale**2)/im3.array.sum(), 1.0, decimal=6,\n err_msg='Did not use surface brightness normalization')\n # Check that when drawing with sb normalization, the sum is the same as the original\n im4 = interp_sb.draw(dx = im_scale, normalization = 'sb')\n np.testing.assert_almost_equal(total_flux/im4.array.sum(), 1.0, decimal=6,\n err_msg='Failed roundtrip for sb normalization')\n\n # Finally make an InterpolatedImage but give it some other flux value\n interp_flux = galsim.InterpolatedImage(im, flux=test_flux)\n # Check that it has that flux\n np.testing.assert_equal(test_flux, interp_flux.getFlux(),\n err_msg = 'InterpolatedImage did not use flux keyword')\n # Check that this is preserved when drawing\n im5 = interp_flux.draw(dx = im_scale)\n np.testing.assert_almost_equal(test_flux/im5.array.sum(), 1.0, decimal=6,\n err_msg = 'Drawn image does not reflect flux keyword')\n\n t2 = time.time()\n print 'time for %s = %.2f'%(funcname(),t2-t1)",
"def update(self, es, **kwargs):\n es.sigma *= self.update2(es, **kwargs)\n if 11 < 3:\n # derandomized MSR = natural gradient descent using mean(z**2) instead of mu*mean(z)**2\n fit = kwargs['fit'] # == es.fit\n slengths = np.array([sum(z**2) for z in es.arz[fit.idx[:es.sp.weights.mu]]])\n # print lengths[0::int(es.sp.weights.mu/5)]\n es.sigma *= np.exp(np.dot(es.sp.weights, slengths / es.N - 1))**(2 / (es.N + 1))\n if 11 < 3:\n es.more_to_write.append(10**((sum(self.ps**2) / es.N / 2 - 1 / 2 if es.opts['CSA_squared'] else _norm(self.ps) / es.const.chiN - 1)))\n es.more_to_write.append(10**(-3.5 + sum(self.ps**2) / es.N / 2 - _norm(self.ps) / es.const.chiN))\n # es.more_to_write.append(10**(-3 + sum(es.arz[es.fit.idx[0]]**2) / es.N))",
"def energy_map(img):\n img_new = img.astype(float) #converting image to float\n total_energy = 0.0 # To store the sum of energy for all channels\n r,c,d = img.shape \n for i in range(d):\n dy = np.zeros([r, c], dtype=float) \n dx = np.zeros([r, c], dtype=float)\n if r > 1:\n dy = np.gradient(img_new[:,:,i], axis=0) #gradient along rows\n if c > 1:\n dx = np.gradient(img_new[:,:,i], axis=1) #gradient along columns\n total_energy += np.absolute(dy) + np.absolute(dx) \n return total_energy #Total energy map for entire image",
"def melscale(f):\n return 1125.0 * np.log(1 + f / 700.0)",
"def scale(img, scale):\n return resize(img, x_scale=scale, y_scale=scale)"
] | [
"0.61338806",
"0.6089663",
"0.6083601",
"0.60192114",
"0.58811396",
"0.5845473",
"0.57790524",
"0.56270677",
"0.56215096",
"0.5540787",
"0.5538469",
"0.5513849",
"0.55025077",
"0.54857355",
"0.5447501",
"0.5444619",
"0.5426935",
"0.5422314",
"0.5416228",
"0.5408127",
"0.53833055",
"0.5352067",
"0.53422153",
"0.5338832",
"0.5322048",
"0.53011394",
"0.5271971",
"0.52600205",
"0.5258094",
"0.5248959",
"0.52431005",
"0.5210241",
"0.5206171",
"0.51981264",
"0.5198062",
"0.519664",
"0.51810205",
"0.51808363",
"0.517885",
"0.51716876",
"0.5168372",
"0.5163276",
"0.5162728",
"0.5156935",
"0.51549685",
"0.51517093",
"0.5151639",
"0.51416695",
"0.51310617",
"0.51051414",
"0.5097691",
"0.50880116",
"0.5086068",
"0.50791734",
"0.5077128",
"0.50597006",
"0.5059077",
"0.5057019",
"0.5052183",
"0.5050625",
"0.5040868",
"0.5039785",
"0.50393665",
"0.5037549",
"0.50301015",
"0.50224984",
"0.5021844",
"0.50203043",
"0.5008198",
"0.5008198",
"0.5001085",
"0.49990505",
"0.49849597",
"0.4984573",
"0.49838346",
"0.49819824",
"0.4973795",
"0.49713904",
"0.49689424",
"0.49676582",
"0.49661106",
"0.4959778",
"0.4957475",
"0.49555814",
"0.49554327",
"0.49516398",
"0.49500993",
"0.4949487",
"0.4945803",
"0.49417382",
"0.4941477",
"0.49392223",
"0.49349588",
"0.49327862",
"0.49307722",
"0.4926975",
"0.4924893",
"0.49246946",
"0.49233437",
"0.49223796"
] | 0.65217674 | 0 |
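
Several snippets in the list above (_reduce_resolution, calc_psf_fwhm_inpix_gaussian, convolve_psf) lean on the same Gaussian FWHM-to-sigma conversion, FWHM = 2*sqrt(2*ln 2)*sigma ≈ 2.3548*sigma. Below is a minimal self-contained sketch of that conversion, assuming only NumPy; the function names are illustrative and not drawn from any snippet above.

import numpy as np

# A Gaussian exp(-x**2 / (2 * sigma**2)) falls to half its peak value at
# x = sigma * sqrt(2 * ln 2), so FWHM = 2 * sqrt(2 * ln 2) * sigma.
FWHM_PER_SIGMA = 2.0 * np.sqrt(2.0 * np.log(2.0))  # ~2.354820045

def fwhm_to_sigma(fwhm):
    """Gaussian full width at half maximum -> standard deviation."""
    return np.asarray(fwhm) / FWHM_PER_SIGMA

def sigma_to_fwhm(sigma):
    """Gaussian standard deviation -> full width at half maximum."""
    return np.asarray(sigma) * FWHM_PER_SIGMA

# Round trip with the 0.55 AA instrumental FWHM defaulted in _reduce_resolution:
assert np.isclose(sigma_to_fwhm(fwhm_to_sigma(0.55)), 0.55)

The same constant appears verbatim as const = 2. * np.sqrt(2. * np.log(2)) in _reduce_resolution and as the hard-coded 2.354820046 in convolve_psf.
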
update the k-th (0-indexed) value with a | def set_val(self, k, a):
        k += self.n - 1  # leaves occupy indices n-1 .. 2n-2 in the flat array
self.dat[k] = a
while k > 0:
k = (k - 1) // 2 # parent
self.dat[k] = self.op(self.dat[k * 2 + 1], self.dat[k * 2 + 2]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __setitem__(self, k, v):\n\n self.valores[( zero - k )%self.longitud] = v",
"def _bucket_setitem(self, j, k, v):\n if self._table[j] is None:\n self._table[j] = UnsortedTableMap() # create new bucket at index j\n oldSize = len(self._table[j])\n self._table[j][k] = v\n if len(self._table[j]) > oldSize: # key is new to the table\n self._n += 1",
"def _bucket_setitem(self, j, k, v):\n pass",
"def __setitem__(self, k, v):\n j = self._hash_function(k)\n self._bucket_setitem(j, k, v)\n if self._n > len(self._table) // 2:\n self._resize(2 * len(self._table) -1)",
"def __setitem__(self, k, value):\n self._coords[k] = value",
"def put(k, v):\n index = get_index(k)\n hash_data[index] = v",
"def update(self, idx, value):\n idx = self.__capacity - 1 + idx\n self.__tree[idx] = value\n self.__update(idx)",
"def __setitem__(self, k, value):\n if k < 0:\n k += len(self)\n if value is not None:\n self.store_array.add_list_item(ListNode(value , k))",
"def modify_pos(self, k, delta):\n self.pos[k] += delta",
"def put(self, k: Any, v: Any):\n i = abs(hash(k)) % self.size\n current = self.data[i]\n while current is not None:\n if current.key == k:\n current.value = v\n return\n current = current.next\n new_node = self.Node(k, v)\n new_node.next = self.data[i]\n self.data[i] = new_node",
"def __setitem__(self, key, val):\n x, y = key\n self.matrix[y][x] = val",
"def __setitem__(self, idx, val):\n self.rows[idx[0]][idx[1]] = val",
"def __setitem__(self,k,v):\n self.insert(k,v)",
"def put(self, key, value):\n i = key //1000\n j = key%1000\n self.container[i][j] = value",
"def __setitem__(self, key, item):\n assert isinstance(key,list) and isinstance(item,list) and len(key)==2 and len(item)==2\n self._data[self.__ptBin(key[0])][self.__etaBin(key[1])] = item",
"def set_v_item(self, vindex, new_val):\n\n i = [((0, 0),),\n ((1, 1),),\n ((2, 2),),\n ([1, 2], [2, 1]),\n ([2, 0], [0, 2]),\n ([0, 1], [1, 0])]\n\n for j, k in i[vindex]:\n self[j, k] = new_val",
"def __setitem__(self, key: Tuple[int, int], value: complex) -> None:\n self.coeff[self._core.index_alpha(key[0]),\n self._core.index_beta(key[1])] = value",
"def set_idx(self, i, other, tensor_value):\n for k, v in self.variables.items():\n if k not in other.variables:\n self.variables[k][i] *= 0\n\n for k, v in other.variables.items():\n if k not in self.variables:\n self.variables[k] = np.zeros(tensor_value.shape)\n self.variables[k][i] = other.variables[k]",
"def __getitem__(self, k) :\n raise NotImplementedError",
"def __setitem__(self, key, value):\n mixed_positions, vindex_positions = _advanced_indexer_subspaces(key)\n self._array[key] = np.moveaxis(value, vindex_positions, mixed_positions)",
"def __setitem__(self, index, value):\n self.elem[index] = value",
"def __setitem__(self, ind: int, value: float) -> None:",
"def update(self, index: int, x: int):\n index += self.n2\n self.tree[index] = self.binary(self.tree[index], x)\n while index > 1:\n # (index ^ 1) はiと1の排他的論理和(XOR)\n x = self.binary(x, self.tree[index ^ 1])\n index >>= 1 # 右ビットシフトで親ノードのインデックスへ移動\n self.tree[index] = self.binary(self.tree[index], x)",
"def __setitem__(key, value):",
"def update_memory_x(self, x_k):\n self.mem_x[self.mem_idx, :] = x_k",
"def put(self, i, value):\n\t\tif(i < 0 or i >= self.d):\n\t\t\traise ValueError(\"Illegal index\")\n\t\tif(value == 0.0):\n\t\t\tself.st.delete(i)\n\t\telse:\n\t\t\tself.st.put(i,value)",
"def _bucket_getitem(self, j, k):\n pass",
"def updateH(self,k_vec,it):\n self.k_vec = k_vec\n self.it = it\n self.H_kc = fl.H_k(k_vec, self.it, self.delta)",
"def update(self, key, value):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n if not bucket:\n raise ValueError('Key does not exist')\n for key_val_pair in bucket:\n if key_val_pair[0] == key:\n key_val_pair[1] = value\n break",
"def put(self, key: int, value: int) -> None:\n idx = key % self.size\n if self.mp[idx]:\n for i in range(len(self.mp[idx])):\n if self.mp[idx][i][0] == key:\n self.mp[idx][i][1] = value\n return\n self.mp[idx].append([key, value])\n else:\n self.mp[idx].append([key, value])",
"def update_store(self, value, index):\n if index == 1:\n self.state[self.M] = value\n else:\n self.state[-1] = value",
"def __getitem__(self, k):\r\n if not 0 <= k < self.n:\r\n return IndexError('It is out of bounds!')\r\n\r\n return self.A[k]",
"def __setitem__(self,key, value):\n cArray.cModule.set_element(self.arrayRef,ctypes.c_int(key),ctypes.c_int(value))",
"def update_aging_hash (aging_hash, k, increment=1):\n\n if not aging_hash.has_key(k):\n aging_hash[k] = (time.time(), increment)\n else:\n current_val = aging_hash[k]\n aging_hash[k] = (current_val[0], increment + current_val[1])\n return aging_hash[k]",
"def __setitem__(self, i, val):\n\t\tif i < self.n:\n\t\t\tself.v[i] = val",
"def __setitem__(self, index, value):\n self._update_value_at(index, value)",
"def updateValue(self,i,x):\n assert 0 <= i < len(self)\n self.__update_aux(0,0,len(self),i,x)",
"def __setitem__(self, key, value):\n self.xg[key] = value",
"def setPositionKey(self, time, index, value, id, view) -> None:\n ...",
"def __setitem__(self, index, value):\n if isinstance(index, tuple):\n list.__getitem__(self, index[0])[index[1]] = value\n elif isinstance(index, int):\n self.pop(index)\n self.insert(index, value)\n else:\n raise TypeError, \"Table indices must be int or tuple\"",
"def __setitem__(self, k, v):\n\t\treturn setattr(self, k, v)",
"def __setitem__(self, key, val):\n self[key][...] = val",
"def update_stats(self, idx, key):\n\n stats = self.stats\n if not stats.has_key(idx):\n stats[idx] = {}\n if stats[idx].has_key(key):\n stats[idx][key] += 1\n else:\n stats[idx][key] = 1",
"def set(self, k, x):\n k = self._checkIndex(k)\n c = self.caches[k]\n if len(c) >= self.N:\n result = c.pop()\n if result == x:\n result = None\n else:\n result = None\n c.appendleft(x)\n return result",
"def __setitem__(self, inds, value):\n i, j = inds\n self.array[i][j] = value",
"def update(self, key: T, value: T) -> None:\n\n if self.load_factor >= self.resize_threshold:\n self.resize() # increase table size once threshold is reached\n\n idx: int = self.hash_fn(key) # get an index location for 'key'\n if self.table[idx] is None: # idx location not occupied\n self.table[idx] = (key, value)\n self.filled_count += 1\n else: # idx location occupied\n if self.table[idx][0] == key: # trying to insert to the same key\n self.table[idx] = (self.table[idx][0], value) # update 'value' at 'key'\n else:\n # probe for next free position using double hashing\n idx2: int = self.h2(key)\n i: int = 1\n while self.table[(idx + i * idx2) % self.table_size] is not None:\n i += 1\n self.table[(idx + i * idx2) % self.table_size] = (key, value) # insert at an unoccupied location\n self.filled_count += 1",
"def update_memory_d(self, d_k):\n self.mem_d[self.mem_idx-1] = d_k",
"def setitem(self, i, j, value):\n # XXX: flint matrices do not support negative indices\n # XXX: They also raise ValueError instead of IndexError\n m, n = self.shape\n if i < 0:\n i += m\n if j < 0:\n j += n\n try:\n self.rep[i, j] = value\n except ValueError:\n raise IndexError(f\"Invalid indices ({i}, {j}) for Matrix of shape {self.shape}\")",
"def put(self, key: int, value: int) -> None:\n pos = self.hash(key)\n self.table[pos][key] = value",
"def increase(self, key:str) -> None:\n\n hash_key = self.hash_key(key)\n head = self.array[hash_key] \n \n while head.next: \n if head.next.key == key:\n head.next.value +=1\n head = head.next",
"def test_swap_k_one_k(self):\n \n nums = [1, 2, 3]\n k = 1\n\n a1.swap_k(nums, k)\n\n self.assertEqual(nums, [3, 2, 1])",
"def incrementSparseVector(a, scale, b):\n # BEGIN_YOUR_CODE (our solution is 2 lines of code, but don't worry if you deviate from this)\n ref = a if len(a)>len(b) else b\n for key in ref:\n a[key] += scale*b[key]\n # END_YOUR_CODE",
"def put(self, key: int, value: int) -> None:\n index = key % 10000\n head = self.array[index]\n while head.next:\n head = head.next\n if head.key == key:\n head.value = value\n return\n head.next = LinkedListNode(key, value)",
"def __setitem__(self, index: int, value: float) -> None:\n self._previous_values[index] = value",
"def __setitem__(self, i, value):\n self._ar[i] = value",
"def __setitem__(self, key, value):",
"def __setitem__(self, idx: int, val: float) -> None:\n assert 0 <= idx < self.capacity, f\"idx={idx} capacity={self.capacity}\"\n\n # Index of the leaf to insert into (always insert in \"second half\"\n # of the tree, the first half is reserved for already calculated\n # reduction-values).\n idx += self.capacity\n self.value[idx] = val\n\n # Recalculate all affected reduction values (in \"first half\" of tree).\n idx = idx >> 1 # Divide by 2 (faster than division).\n while idx >= 1:\n update_idx = 2 * idx # calculate only once\n # Update the reduction value at the correct \"first half\" idx.\n self.value[idx] = self.operation(\n self.value[update_idx], self.value[update_idx + 1]\n )\n idx = idx >> 1 # Divide by 2 (faster than division).",
"def update(self,haiku, typenum):\n self.occurrences += 1\n for i in range(2):\n for x in (haiku.triple[i]).wordarray:\n if (self.wordtype == dictionary.wordtype(x) and \n dictionary.word_filter(x) != self.word):\n self.update_adj_dict(x, i==typenum)",
"def _set_ks_dynamic(self, ks):\n assert(len(ks) == len(self.idxs))\n self.ks = ks\n if np.max(self.ks) > self._kret:\n self._kret = np.max(self.ks)",
"def __setitem__(self, key: tuple, value: float):\n s, a = key\n self.store.setdefault(s, dict())[a] = value",
"def replace(self, index, value):\n index += self.n\n self.data[index] = value\n index //= 2\n while index > 0:\n self.data[index] = self.func(self.data[2*index], self.data[2*index+1])\n index //= 2",
"def update(self, i, v):\n # index in BTree is 1 more than index in arr[]\n i += 1\n\n # Traverse to ancestors of BITree[i]\n while i <= self.size:\n self.BITree[i] += v\n\n # Update index to next set bit in binary representation\n i += i & (-i)",
"def put(self, key: int, value: int) -> None:\n t = key % 20011\n for item in self.hash[t]:\n if item[0] == key:\n item[1] = value\n return\n self.hash[t].append([key, value])",
"def update(self, idx, x):\n while idx < len(self.bit):\n self.bit[idx] += x\n idx |= idx + 1",
"def update_idx(self):\n self.idx = (self.F * self.FMUL +\n self.E * self.EMUL +\n self.Z * self.ZMUL +\n self.A * self.AMUL +\n self.B * self.BMUL )",
"def __setitem__(self, key, value):\n i, kv_pair = self._lookup(key, self._backing)\n self._backing[i] = KeyValue(key, value)\n if kv_pair is None:\n self._used += 1\n \n size = len(self._backing)\n utilization = self._used/size\n if utilization > 0.67:\n self._resize(self._incr_size(size))",
"def put(self, key, value):\n index = key % self.size\n\n if not self.bucket[index]:\n self.bucket[index] = ListNode(key , value)\n else:\n cur = self.bucket[index]\n\n while cur:\n if cur.key == key:\n cur.val = value\n return\n if not cur.next: break\n cur = cur.next\n cur.next = ListNode(key, value)",
"def index_wrap(self, k):\n return (self.first_player + k) % self.num_players",
"def __setitem__(self, j, val):\n\t\tself._coords[j] = val",
"def incr(self, x, term=1):\n self.d[x] = self.d.get(x, 0) + term",
"def _put(self, k, v, currNode):\n if k < currNode.key:\n if currNode.hasLeftChild():\n self._put(k, v, currNode.leftChild)\n else:\n currNode.leftChild = TreeNode(k, v, parent=currNode)\n\n elif k > currNode.key:\n if currNode.hasRightChild():\n self._put(k, v, currNode.rightChild)\n else:\n currNode.rightChild = TreeNode(k, v, parent=currNode)\n\n else:\n currNode.payload = v\n self.size -= 1",
"def __setitem__(self, key, value):\r\n T=type(key)\r\n if T!=types.IntType and T!=types.LongType:\r\n raise TypeError, \"index must be integer\"\r\n\r\n if key==0: self.x = value\r\n elif key==1: self.y = value\r\n elif key==2: self.z = value\r\n elif key==3: self.w = value\r\n else:\r\n raise IndexError,\"index out of range\"",
"def update(self, idx, add):\n idx += 1\n while idx < len(self.array):\n self.array[idx] += add\n idx += idx & -idx #Adding the last bit",
"def increment(self, index, value):\n self._inrange(index)\n if value==0:\n return\n found,ii = self._find_index(index)\n if found:\n self.value[ii] += value\n if self.value[ii] == 0:\n del self.index[ii]\n del self.value[ii]\n else:\n self.index.insert(ii, index)\n self.value.insert(ii, value)",
"def incrementK(self, step_size, dof1, dof2):\n self.K[dof1 - 1, dof1 - 1] += step_size\n self.K[dof1 - 1, dof2 - 1] += -step_size\n self.K[dof2 - 1, dof1 - 1] += -step_size\n self.K[dof2 - 1, dof2 - 1] += step_size",
"def add_item(self, i, k):\n if k == self.K:\n self.K += 1\n self.m_N_numerators[k, :] = self.prior.k_0*self.prior.m_0\n self.S_N_partials[k, :] = self.prior.S_0 + self.prior.k_0*self._cached_prior_square_m_0\n self.m_N_numerators[k, :] += self.X[i]\n self.S_N_partials[k, :] += self._cached_square[i]\n self.counts[k] += 1\n self._update_log_prod_vars_and_inv_vars(k)\n self.assignments[i] = k",
"def __setitem__(self, index, value):\n self.position[index] = value",
"def insert(self, k):\n node = super(AVL, self).insert(k)\n self.rebalance(node)",
"def __setitem__(self, idx, value):\n row, col = idx\n\n if row < 0 or row >= self.num_rows:\n raise IndexError(\"Row out of bounds\")\n\n if col < 0 or col >= self.num_cols:\n raise IndexError(\"Col out of bounds\")\n\n if value == self.default:\n del self[row, col]\n return\n\n array_row = self._find_row_before(row)\n\n if (array_row.next_row == None or array_row.next_row.row_number > row):\n new_row = SparseMatrix.MatrixRow()\n new_row.row_number = row\n new_row.next_row = array_row.next_row\n array_row.next_row = new_row\n\n sentinel_entry = SparseMatrix.MatrixEntry()\n new_row.row_sentinel = sentinel_entry\n\n array_row = array_row.next_row\n array_entry = self._find_column_before(array_row, col)\n\n if (array_entry == None or array_entry.next_entry == None or\n array_entry.next_entry.column_number > col):\n new_entry = SparseMatrix.MatrixEntry()\n new_entry.column_number = col\n if array_entry == None:\n new_entry.next_entry = None\n else:\n new_entry.next_entry = array_entry.next_entry\n array_entry.next_entry = new_entry\n\n array_entry = array_entry.next_entry\n array_entry.value = value",
"def __getitem__(self, k):\n if k < 0:\n k += len(self)\n if not 0 <= k < self._n:\n raise IndexError('invalid index')\n return self.store_array[k]",
"def put(self, key: int, value: int) -> None:\n index = key % self.size\n \n if self.table[index].value == None:\n self.table[index] = ListNode(key, value)\n return\n \n p = self.table[index]\n while p:\n if p.key == key:\n p.value = value\n return\n \n if p.next is None:\n break\n p = p.next\n p.next = ListNode(key,value)",
"def __setitem__(self, idx, value):\n assert(isinstance(idx, int))\n nidx = self._normalize_idx(idx)\n if nidx >= len(self.data):\n raise IndexError\n self.data[nidx] = value",
"def __setitem__(self, key, value):\n self._maps[0][key] = value",
"def update(self, d):\n for k in d:\n self[k] = d[k]",
"def test_swap_k_zero_k(self):\n \n nums = [1, 2, 3]\n k = 0\n\n a1.swap_k(nums, k)\n\n self.assertEqual(nums, nums)",
"def __setitem__(self, k, v):\n #if tree is empty\n if self.is_empty():\n # inherited from LinkedBinaryTree class\n # _Item(k, v) is inheritated from MapBase class\n leaf = self._add_root(self._Item(k,v)) \n else:\n p = self._subtree_search(self.root(), k)\n #if k is present in current tree\n if p.key() == k:\n #it's not p.value()!!\n p.element()._value = v\n self._rebalance_access(p)\n return\n #didn't find k in current tree; create a new object of Item\n # and add to either left or right of the last node searched\n else:\n item = self._Item(k, v)\n if k > p.key():\n leaf = self._add_right(p, item)\n else:\n leaf = self._add_left(p, item)\n self._rebalance_insert(leaf)",
"def setByPathAndIndex(self, keys, index, value):\n self.getByPath(keys[:-1])[keys[-1]][index] = value",
"def __getitem__(self, k):\n if not 0 <= k < self._size:\n raise IndexError( 'invalid index' )\n return self._Array[k] # retrieve from array",
"def touchKBucket(self, key):",
"def __setitem__(self, index: int, value: object) -> None:\n self.set_at_index(index, value)",
"def affect_model(model, lateral_index, lateral_value):\n model[lateral_index] = lateral_value\n return model",
"def update_kanda(self, kanda):\n\t\tself.subvarga = ''\n\t\tself.subvargaNum = 1\n\t\tself.varga = ''\n\t\tself.vargaNum = 1\n\t\tself.kanda = kanda\n\t\tself.kandaNum += 1",
"def update(\n self, index: Union[int, np.ndarray], value: Union[float, np.ndarray]\n ):\n\n tree_index = self.capacity + index\n self._tree[tree_index] = value\n\n # Propagate up the tree.\n parent = tree_index // 2\n while np.any(parent > 0):\n left = self._tree[2 * parent] # Children/sibling.\n right = self._tree[2 * parent + 1]\n # Note: Due to possible floating point error in the sum-tree case,\n # it's safer to recompute the parent nodes directly rather than to\n # accumulate an \"update\" up the tree which could be faster.\n self._tree[parent] = self.operation(left, right)\n parent = parent // 2",
"def updateRow(self, index: int) -> None:\n ...",
"def _put(self, key, value, current_node):\n pass",
"def set(self, index, data):\n self.data[index] = data",
"def swap_k(L, k):\n\n end = L[len(L)-k:]\n start = L[:k]\n\n for i in range(k):\n L[i] = end[i]\n L[(len(L)-k)+i] = start[i]",
"def __setitem__(self, index, value):\n if index == Ellipsis:\n index = tuple(self.dim*[slice(None)])\n\n if len(index) < self.dim:\n # --- Add extra dims to index if needed\n index = list(index)\n for i in range(len(index), self.dim):\n index.append(slice(None))\n index = tuple(index)\n\n if self.dim == 2:\n return self._setitem2d(index, value)\n elif self.dim == 3:\n return self._setitem3d(index, value)",
"def _set_tuple_k_structure(self, key):\n self.ks = [key[1]] if type(key[1]) == int else key[1]\n self.set_neighs(key[0])",
"def test_swap_k_one_size(self):\n \n nums = [5]\n k = 1\n\n a1.swap_k(nums, k)\n\n self.assertEqual(nums, [5])"
] | [
"0.69229925",
"0.6456467",
"0.6364326",
"0.62770015",
"0.6199403",
"0.60915345",
"0.6048265",
"0.6025952",
"0.60192573",
"0.59893894",
"0.59650415",
"0.59532386",
"0.5934357",
"0.59224254",
"0.58959097",
"0.5847799",
"0.5826606",
"0.5785241",
"0.577905",
"0.5765922",
"0.57567644",
"0.57482696",
"0.5742578",
"0.57411206",
"0.57360464",
"0.57339394",
"0.5730614",
"0.57084453",
"0.5708278",
"0.5704075",
"0.56933624",
"0.5692781",
"0.56629884",
"0.5655927",
"0.56485546",
"0.562961",
"0.5625167",
"0.5622636",
"0.5622098",
"0.5609173",
"0.56061655",
"0.56050146",
"0.5598849",
"0.55976516",
"0.55964273",
"0.55815846",
"0.55796444",
"0.5555767",
"0.55516434",
"0.5535834",
"0.5528076",
"0.55136424",
"0.5502298",
"0.54994833",
"0.5496048",
"0.54942685",
"0.54862404",
"0.5485328",
"0.5483212",
"0.54813707",
"0.54773873",
"0.5477152",
"0.5473861",
"0.5472684",
"0.5470049",
"0.5463108",
"0.5451346",
"0.5444985",
"0.54423094",
"0.5440524",
"0.5431885",
"0.54307395",
"0.5426078",
"0.5416167",
"0.5410879",
"0.5409699",
"0.54076636",
"0.53959537",
"0.5387053",
"0.5386815",
"0.5384325",
"0.53836685",
"0.53690404",
"0.5365705",
"0.5363426",
"0.5359674",
"0.5355141",
"0.53487736",
"0.53303164",
"0.532998",
"0.532929",
"0.5327732",
"0.5320863",
"0.53206545",
"0.53195846",
"0.53170973",
"0.53169173",
"0.5313966",
"0.53115296",
"0.53092474"
] | 0.7316253 | 0 |
recursive version, Ref. Ant Book p.155 | def query_recursive(self, p, q):
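        # query over the half-open interval [p, q); an empty range yields the identity element e()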
if q <= p:
return self.e()
return self._query_recursive(p, q, 0, 0, self.n) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def recursive():\n with Local() as tun:\n tun.call(recursive)",
"def lis_recursive(array):\n\n #TODO",
"def test_scan_recursive(self):\n self.run_scan(self.tempdir, self.root_fcount + self.nest_fcount + 1)",
"def test_level1_recursion(self):\n recursed = recurse_files('filename', self.files['filename'], self.files)\n self.assertEqual(recursed, [\"file7\", \"file2\", \"file3\"])",
"def isrecursive(object):\r\n return PrettyPrinter().isrecursive(object)",
"def test_level2_recursion(self):\n recursed = recurse_files('filename2', self.files['filename2'], self.files)\n self.assertEqual(recursed, [\"file7\", \"file2\", \"file3\", \"file6\"])",
"def fn(x):\n nonlocal ans \n if x < ans: \n if min(depth) == n: ans = x # all tiled\n else: \n i = min(depth)\n j = jj = depth.index(i) # (i, j)\n while jj < m and depth[jj] == depth[j]: jj += 1\n k = min(n - i, jj - j)\n for kk in reversed(range(1, k+1)): \n for jj in range(j, j+kk): depth[jj] += kk\n fn(x+1)\n for jj in range(j, j+kk): depth[jj] -= kk",
"def tri_recursion(k):\r\n if(k>0):\r\n result = k + tri_recursion(k-1)\r\n # print(result)\r\n else:\r\n result = 0\r\n\r\n return result",
"def getImmediateSubdirectories(dir):",
"def fn(k, i):\n ii = -1 \n for x in path:\n if gcd(nums[k], x) == 1: # coprime \n if path[x] and path[x][-1][1] > ii: \n ans[k] = path[x][-1][0]\n ii = path[x][-1][1]\n \n path.setdefault(nums[k], []).append((k, i))\n for kk in tree.get(k, []): \n if kk not in seen: \n seen.add(kk)\n fn(kk, i+1)\n path[nums[k]].pop()",
"def build(root):",
"def count_recursively(lst):\n\n if len(lst) == 0: \n return 0\n else: \n return 1 + count_recursively(lst[1:])",
"def __call__(self, node):\n if node.children:\n if len(node.children) == 1:\n if self.TagEqual(node.children[0], node):\n #print node.ToString()\n node.tag = self.Tag(node, node.children[0]);\n lst = node.children[0].children;\n node.children = lst;",
"def test_level3_recursion(self):\n recursed = recurse_files('filename3', self.files['filename3'], self.files)\n self.assertEqual(recursed, [\"file7\", \"file2\", \"file3\", \"file6\", \"file5\"])",
"def recursive(nums, left, right, target):\n return",
"def _union_find_rep(self, num, parents):\n rep, parent = num, parents[num]\n while parent != rep:\n rep = parent\n parent = parents[rep]\n # path compression\n temp, parent = num, parents[num]\n while parent != rep:\n parents[temp] = rep\n temp = parent\n parent = parents[temp]\n return rep",
"def getParents(obj):",
"def test_3_recursive(self):\n\n bronze = fmri.PkgFmri(self.published[4], None)\n\n # Retrieve bronze recursively to a directory, this should\n # also retrieve its dependency: amber, and amber's dependency:\n # tree.\n self.pkgrecv(self.durl1, \"--raw -r -k -d {0} {1}\".format(self.tempdir,\n bronze))\n\n amber = fmri.PkgFmri(self.published[1], None)\n tree = fmri.PkgFmri(self.published[5], None)\n\n # Verify that the manifests for each package was retrieved.\n for f in (amber, bronze, tree):\n mpath = os.path.join(self.tempdir, f.get_dir_path(),\n \"manifest\")\n self.assertTrue(os.path.isfile(mpath))",
"def find_all(st, sub):\n\n if not sub: return None\n if sub[0] not in st.root.trans: return None\n \n found, i, s = False, 0, st.root\n scaned = 0 # length of the scaned\n while True:\n k, p, s = s.trans[sub[i]]\n len1, len2 = p-k+1, len(sub)-i\n if len1 >= len2:\n if st.text[k:k+len2] == sub[i:]:\n found, scaned = True, scaned+len1\n break\n else:\n if st.text[k:k+len1] == sub[i:i+len1]:\n i, scaned = i+len1, scaned+len1\n else: break\n if found:\n # shift_of_suffix = len(st.text) - len(suffix)\n leaf_depthes = get_leaf_depthes(s)\n return [len(st.text)-x-scaned for x in leaf_depthes]\n\n return None",
"def permutations_recur(lst, i):\n\n pass # Replace this with your implementation of the function.",
"def step(tree):\n if type(tree) == list and type(tree[0]) == tuple:#This basically looks for any applications it can do directly. These applications are the ones where the function is already defined through abstraction. That's why it checks whether the first element of the list (for application) is an abstraction\n func = tree[0]#The whole function with parameter and body\n name = func[0][1]#Only the parameter\n func = func[1]#Only the body\n arg = tree[1]\n nfunc = replace(name, arg, func)#The replacement of all occurences of the parameter in the body with the argument\n return nfunc\n elif type(tree) == list:\n return [step(tree[0]), step(tree[1])]#recursive checking, again\n elif type(tree) == tuple:\n return (tree[0], step(tree[1]))\n else:\n return tree",
"def _tour(self,p,d,path):\n self._hook._previsit(p,d,path) # pre-visit p\n result = []\n path.append(0) # add new index to the end of path before recursion\n for c in self._tree.children(p):\n results.append(self._tour(c,d+1,path)) # recur on child's subtree\n path[-1] += 1 # increment index\n path.pop() # remove extraneous index from the end of the path\n answer = self._hook._postvisit(p,d,path,results) # post-visit p\n return answer",
"def len_link_recursive(s):\n if s == empty:\n return 0\n return 1 + len_link_recursive(rest(s))",
"def printFuncDependencyTreeRecursive(self, callStack, processedFuncIdxs=set()):\n assert callStack\n\n funcIdx = callStack[-1]\n depth = len(callStack) - 1\n print('--' * depth + str(self.getParentBaseFiles(funcIdx)) + ' ' + self.idx2Funcs[funcIdx])\n if funcIdx in processedFuncIdxs:\n # Base case 1: This function's call hierarchy has been processed (memoization)\n print('--' * (depth + 1) + '... (Truncated: Sub-hierarchy processed before)')\n callStack.pop(-1)\n return\n else:\n processedFuncIdxs.add(funcIdx)\n\n # Base case 2: This function doesn't have callers\n if funcIdx not in self.callee2caller:\n callStack.pop(-1)\n return\n else:\n callerIdxSet = self.callee2caller[funcIdx]\n assert callerIdxSet\n\n for callerIdx in callerIdxSet:\n if callerIdx not in callStack:\n callStack.append(callerIdx)\n self.printFuncDependencyTreeRecursive(callStack, processedFuncIdxs)\n else:\n # Base case 3: Recursion cycle detected\n # TODO: This base case may be redundant and never be reached because of base case 1. Can consider removing.\n print('--' * (depth + 1) + str(self.getParentBaseFiles(callerIdx)) + ' ' + self.idx2Funcs[callerIdx] + '(recursion)')\n callStack.pop(-1)\n return\n\n # Base case 4: Finished printing all callers\n callStack.pop(-1)\n return",
"def test_fibonacci_recursive(self):\r\n result = fib.fibonacci_recursive(5)\r\n self.assertEqual(result, 8)",
"def __recon_all_individual(sub, struct_dir, fs_sub_dir):\n T1_name = sub + '_T1w.nii.gz'\n\n if os.path.isfile(struct_dir + '/' + T1_name):\n os.system(f\"tcsh -c 'recon-all -i {struct_dir}/{T1_name} -s {sub} -all -parallel'\")\n else:\n print('no T1 found for ' + sub)\n\n this_sub_dir = f'{fs_sub_dir}/{sub}'\n return this_sub_dir",
"def get_change_recursive(m):\n if m < 0:\n raise ValueError(\"Invalid negative amount\")\n if m == 0:\n raise ValueError(\"ok 0 moves, but this shouldn't happen either.\")\n\n if m in [1, 3, 4]:\n return 1\n\n alt_paths = [get_change_recursive(m-1)]\n\n if m > 3:\n alt_paths.append(get_change_recursive(m-3))\n if m > 4:\n alt_paths.append(get_change_recursive(m-4))\n\n return min(alt_paths) + 1",
"def count_recursively(lst):\n\n if lst == []:\n return 0\n \n return 1 + count_recursively(lst[1:])",
"def recurse_tree(self, d, a, tail_top, tail_bottom):\n\n if self.optimal[d][a] == 0:\n self.relative_position = d - a\n\n else:\n tc = ''\n if d >= 0:\n tc = self.sequenceA[d-1]\n bc = ''\n if a >= 0:\n bc = self.sequenceB[a-1]\n\n if (self.direction[d][a] & self.LEFT) == self.LEFT: # If Left Arrow\n self.recurse_tree(d, a - 1, '-' + tail_top, bc + tail_bottom)\n\n if (self.direction[d][a] & self.DIAGONAL) == self.DIAGONAL: # If Diagonal Arrow\n self.recurse_tree(d - 1, a - 1, tc + tail_top, bc + tail_bottom)\n\n if (self.direction[d][a] & self.UP) == self.UP: # If Up Arrow\n self.recurse_tree(d - 1, a, tc + tail_top, '-' + tail_bottom)",
"def Children(self) -> _n_1_t_2:",
"def traverse(name, furtherPath):",
"def sprout_leaves(t, vals):",
"def reduce_rec(node):\n if node.is_leaf():\n return\n for edge in node.child_nodes:\n # replacing the subdiagram with a singular isomorphic one\n node.child_nodes[edge] = hashtable[node.child_nodes[edge].__hash__()]\n # and going down recursively along that subdiagram\n reduce_rec(node.child_nodes[edge])",
"def fn(x):\n trie.insert(x)\n for v, i in mp.get(x, []): ans[i] = trie.search(v)\n for xx in tree.get(x, []): fn(xx)\n trie.remove(x)",
"def resolve_all_refs_recursively(s):\n passes=0\n while s.resolve_all_refs()>0:\n passes=passes+1\n return passes",
"def dft_recursive(self, starting_vertex):\n \n visited = []\n\n def helper(vert, visited):\n visited.append(vert)\n print(vert)\n\n for child in self.vertices[vert]:\n if child not in visited:\n helper(child, visited)\n\n helper(starting_vertex, visited)",
"def simple_root(self, i):",
"def reduceDepthMain(lines):\r\n\r\n # generate depth tables for lines, a T table and Y table with depth\r\n main_flag = 0\r\n while True:\r\n elements = baseElementTables(lines)\r\n depth_list = find_depth(lines)\r\n \r\n least_depth = 100000 # placeholder\r\n chosen_line = -1\r\n # search for all gates that the depth is equal or more than the number of base \r\n # elements present. Among those valid ones, choose the one that has the least depth\r\n for i in range(len(lines)):\r\n if depth_list[i] >= len(elements[i]):\r\n if depth_list[i] < least_depth:\r\n least_depth = depth_list[i]\r\n chosen_line = i\r\n if chosen_line == -1: # if there is nothing to choose, break\r\n break\r\n if not reduceDepth(lines,lines[chosen_line]):\r\n break\r\n main_flag = 1\r\n # reorganise the order\r\n reorgOrder(lines)\r\n # renaming in order\r\n renaming(lines)\r\n return main_flag",
"def test_verify_recursive_and_transverse_acl_options():",
"def solution(s):",
"def fn(n):\n if n == 1: return [TreeNode()]\n ans = []\n for nn in range(1, n, 2): \n for left in fn(nn):\n for right in fn(n-1-nn): \n ans.append(TreeNode(left=left, right=right))\n return ans",
"def trie_recurse(wordinds, charinds, prefix, probs, cumul, trie, model, new_inp):\n num = 0\n for let in charinds.keys():\n new_inp[0][-1] = eye[charinds[let]]\n keys = trie.keys(prefix+let)\n num = len(trie.keys(prefix+let))\n if num == 1:\n final_probs[0][wordinds[keys[0]]] = np.multiply(cumul, probs[0][charinds[let]])\n elif num > 1:\n probs = model.predict(new_inp)\n new_inp = np.roll(new_inp, -1, 1)\n \n cumul = np.multiply(cumul, probs[0][charinds[let]])\n trie_recurse(wordinds, charinds, prefix+let, probs, cumul, trie, model, new_inp)",
"def li_recurse_start(self, lin):\n\n raise NotImplementedError(\"li_recurse_start() not implemented \"\n \"in superclass\")",
"def is_abecedarian_using_recursion(word):\n pass",
"def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n util.raiseNotDefined()",
"def getRoot(obj):",
"def _mutate_file(self, node, visited = set([])):\n for ch in self._get_children(node):\n\n if ch not in visited:\n visited.add(ch)\n\n try:\n self._mutate_node(ch)\n except Exception as e:\n print(e)\n\n # Recursion is a bitch\n self._mutate_file(ch, visited)",
"def path(g): #g: graph\n marked = set()\n nodes = set(g.nodes) \n output = list()\n def recursive(g):\n for i in nodes.copy():\n d = dependents(g,i)\n if (not d) or all(dd in marked for dd in d):\n output.append((i,g.nodes[i]['word']))\n marked.add(i)\n nodes.remove(i)\n if nodes==set([0]):\n break\n recursive(g)\n break\n recursive(g)\n return output",
"def recursive_sum(lst):\n\n if lst == []:\n return 0\n\n else:\n\n return lst[0] + recursive_sum(lst[1:])",
"def recursiveTraceJumptablese(ea, function=False):",
"def exercise_b2_107():\r\n pass",
"def levelorder_visit_recursive(t: Tree, act: Callable[[Tree], None]) -> None:\n if t.value is None:\n pass\n else:\n level = 0\n visited = visit_level(t, level, act)\n while visited > 0:\n level += 1\n visited = visit_level(t, level, act)",
"def C(relatorlist,quit_at=float('inf')):\n F,rels=fg.parseinputwords(relatorlist)\n if not all(r==F.cyclic_reduce(r) for r in rels):\n raise ValueError(\"Relators are not cyclically reduced.\")\n thepieces=pieces(rels)\n minnumberpieces=quit_at\n def min_string_piece_expression(whatsleft,thepieces,quit_at):\n # recursively determine the minimal expression of the string whatsleft as a concatenation of elements of thepieces, or stop once it is determined that any such expression requires at least quit_at many pieces\n # find a piece that agrees with a prefix of whatsleft and the recurse on the suffix\n if not whatsleft:\n return 0\n minexp=quit_at\n for p in thepieces:\n if p!=whatsleft[:len(p)]:\n continue\n else:\n minexp=min(minexp,1+min_string_piece_expression(whatsleft[len(p):],thepieces,minexp-1))\n return minexp\n def min_relator_piece_expression(relator,thepieces,quit_at):\n # This is first step in recursive search. Here we want to find a piece p such that for relator r we can write p=xy and r=yzx, with y nontrivial. That is, in this step only we think of r as cyclic word and allow first piece that wraps.\n r=relator()\n minexp=quit_at\n for p in thepieces:\n if len(p)>len(r):\n continue\n possiblestartingindices=[] # for given p there may be different possible choices of y\n for startingindex in range(len(r)-len(p)+1,len(r)+1):\n if p==(r+r)[startingindex:startingindex+len(p)]:\n possiblestartingindices.append(startingindex)\n if not possiblestartingindices:\n continue\n for startingindex in possiblestartingindices:\n # found a way to fit p into r spanning the beginning of r. This accounts for x and y part of r. Now recursively find shortest expression of z=whatsleft as a concatenation of pieces.\n whatsleft=(r+r)[startingindex+len(p):startingindex+len(r)]\n if not whatsleft:\n return 1\n else:\n minexp=min(minexp,1+min_string_piece_expression(whatsleft,thepieces,minexp-1))\n return minexp\n for thisrelator in rels:\n minnumberpieces=min(minnumberpieces,min_relator_piece_expression(thisrelator,thepieces,minnumberpieces))\n return minnumberpieces",
"def recursive(input):\n\n # Base Case: Argument input greater than 0.\n if input <= 0:\n return 0\n else:\n output = recursive(input - 1)\n print(output)",
"def walkthrough(software_map):\n\n for i in software_map:\n\n if not i[\"is_file\"]:\n\n # for each directory: make a index.md\n dname = \"./docs/\" + i[\"name\"]\n index = \"./docs/\" + i[\"name\"] + \"/index.md\"\n print(index)\n os.mkdir(dname)\n\n with open(index, \"w+\") as f:\n\n children = i[\"children\"]\n\n # list files\n f.write(\"Files:\\n\\n\")\n for i in children:\n if i[\"is_file\"]:\n\n fname = i[\"name\"]\n fext = fname.split(\".\")\n if len(fext) == 2:\n fext = fext[1]\n else:\n fext = \"none\"\n # for each file, note name and extension\n f.write(fname + \" : \" + fext + \"\\n\")\n\n # list subdirectories\n f.write(\"\\nSubdirectories:\\n\\n\")\n for i in children:\n if not i[\"is_file\"]:\n\n dirname = i[\"name\"]\n\n # note the number of files and subdirs in it\n num_files, num_dirs = 0, 0\n for child in i[\"children\"]:\n if child[\"is_file\"]:\n num_files += 1\n elif not child[\"is_file\"]:\n num_dirs += 1\n\n # note down name and numbers for each dir\n f.write(dirname + \" : \" + str(num_files) + \" files, \" +\n str(num_dirs) + \" directories\\n\")\n\n # goto subdir\n if len(i[\"children\"]) > 0:\n walkthrough(i[\"children\"])",
"def fn(n):\n while digraph.get(n, []): fn(heappop(digraph[n]))\n ans.appendleft(n)",
"def recursive_search(i, F, t, s, explored, leaders, order):\n x = len(explored)\n if x % 10 == 0:\n print(\"Length of explored: {}\".format(x))\n explored.append(i)\n if order == 2:\n leaders[i] = s\n arc_list = db.Database.find_one(collection=\"biggraph\", query={\"key\": i})\n if arc_list:\n for node in arc_list['value']:\n if node not in explored:\n F, t, leaders, explored = recursive_search(node, F, t, s, explored, leaders, order)\n if order == 1:\n t += 1\n F[i] = t\n return F, t, leaders, explored",
"def item_recurse(path, index):\n global count\n if index == len(item_keys):\n print(count,path,sep='')\n count +=1\n return\n for filename in db[item_keys[index]]['files']:\n newpath = str.join('-', (path, filename))\n item_recurse(newpath, index+1)",
"def exercise_b2_113():\r\n pass",
"def palindrome_recursive(a):\n # Base case\n if len(a) <= 1:\n return True\n else:\n if a[0] == a[len(a)-1]:\n return palindrome_recursive(a[1:-1])\n else:\n return False",
"def exercise_b2_53():\r\n pass",
"def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n \n util.raiseNotDefined()",
"def preRead(force,cat,path,depth = 0,retries = 0):\n limit = 6 # Lower limit for printing debug messages in this function\n root = None\n if cat == 'p':\n global people\n root = people\n elif cat == 'l':\n global places\n root = places\n elif cat == 'c':\n global cities\n root = cities\n elif cat == 's':\n global states\n root = states\n elif cat == 'o':\n global orgs\n root = orgs\n elif cat == 'i':\n global items\n root = items\n else:\n print \"ERR: Invalid category %s passed to markChanged.\" % cat\n if not root:\n print \"preRead: Invalid category %s?\" % cat\n return False\n if depth > len(path): depth = len(path)\n if depth > 7: depth = 7\n if path[0] in root.keys():\n if depth <= 1:\n return True\n if path[1] in root[path[0]].keys():\n if depth <= 2:\n return True\n if path[2] in root[path[0]][path[1]].keys():\n if depth <= 3:\n return True\n if path[3] in root[path[0]][path[1]][path[2]].keys():\n if depth <= 4:\n return True\n if path[4] in root[path[0]][path[1]][path[2]][path[3]].keys():\n if depth <= 5:\n return True\n if path[5] in root[path[0]][path[1]][path[2]][path[3]][path[4]].keys():\n if depth <= 6:\n return True\n if path[6] in root[path[0]][path[1]][path[2]][path[3]][path[4]][path[5]].keys():\n return True # Maximum depth reached\n elif force:\n root[path[0]][path[1]][path[2]][path[3]][path[4]][path[5]][path[6]] = {}\n if retries >= depth: force = False\n return preRead(force,cat,path,depth,retries + 1)\n else: # Not found, and not forcing it to be found\n if config['debug'] > limit: debugPath(root,path)\n return False\n elif force:\n root[path[0]][path[1]][path[2]][path[3]][path[4]][path[5]] = {}\n if retries >= depth: force = False\n return preRead(force,cat,path,depth,retries + 1)\n else: # Not found, and not forcing it to be found\n if config['debug'] > limit: debugPath(root,path)\n return False\n elif force:\n root[path[0]][path[1]][path[2]][path[3]][path[4]] = {}\n if retries >= depth: force = False\n return preRead(force,cat,path,depth,retries + 1)\n else: # Not found, and not forcing it to be found\n if config['debug'] > limit: debugPath(root,path)\n return False\n elif force:\n root[path[0]][path[1]][path[2]][path[3]] = {}\n if retries >= depth: force = False\n return preRead(force,cat,path,depth,retries + 1)\n else: # Not found, and not forcing it to be found\n if config['debug'] > limit: debugPath(root,path)\n return False\n elif force:\n root[path[0]][path[1]][path[2]] = {}\n if retries >= depth: force = False\n return preRead(force,cat,path,depth,retries + 1)\n else: # Not found, and not forcing it to be found\n if config['debug'] > limit: debugPath(root,path)\n return False\n elif force:\n root[path[0]][path[1]] = {}\n if retries >= depth: force = False\n return preRead(force,cat,path,depth,retries + 1)\n else: # Not found, and not forcing it to be found\n if config['debug'] > limit: debugPath(root,path)\n return False\n else: # First level (fileid) can't be generated.\n if config['debug'] > limit: debugPath(root,path)\n return False",
"def _downstream_helper(self, n):\n children = [self._downstream_helper(node) \n for node in self.get_successors(n)]\n return {n : children} if children else {n : []}",
"def Trees__LCA_LowestCommonDenominator():\n # Python2 ported to Python3 via 2to3-3.7\n # URL:# URL:https://www.hackerrank.com/challenges/binary-search-tree-lowest-common-ancestor/problem\n '''\n class Node:\n def __init__(self,info): \n self.info = info \n self.left = None \n self.right = None \n // this is a node of the tree , which contains info as data, left , right\n '''\n def lca(root, v1, v2):\n # Find a and b. Link child nodes to parent to be able to backtrack.\n # (1) Note, we add 'parent' attribute to node dynamically via node.parent = ...\n root.parent = None\n node_stack = []\n node_stack.append(root)\n v1_node, v2_node = None, None\n while node_stack:\n node = node_stack.pop()\n if not v1_node and node.info == v1:\n v1_node = node\n if not v2_node and node.info == v2:\n v2_node = node\n for child_node in [node.left, node.right]:\n if child_node:\n child_node.parent = node # (1)\n node_stack.append(child_node)\n\n # Generate path from A to root.\n curr = v1_node\n a_to_root = set()\n while curr:\n a_to_root.add(curr.info)\n curr = curr.parent\n\n # traverse up b until you come across an element in a's path to parent.\n curr = v2_node\n while curr:\n if curr.info in a_to_root:\n return curr\n else:\n curr = curr.parent\n\n print(\"Shouldn't be here, Something went wrong\")\n\n # # Recursive. (Iterative is better, but did recursive for practice.) ~15 min.\n # # Main idea is that we count the number of v1/v2's found of the subnodes.\n # # If a node has sum of 2, we know it's the lca.\n # def lca(root, v1, v2):\n # def lca_helper(node):\n # ret_node = None\n # if not node:\n # return 0, None\n # v_match_counter = 0\n # if node.info in [v1, v2]:\n # v_match_counter += 1\n # left_count, left_node_ret = lca_helper(node.left)\n # right_count, right_node_ret = lca_helper(node.right)\n # v_match_counter += left_count + right_count\n # if v_match_counter == 2:\n # ret_node = node\n # if left_node_ret:\n # ret_node = left_node_ret\n # if right_node_ret:\n # ret_node = right_node_ret\n # return v_match_counter, ret_node\n\n # _, node = lca_helper(root)\n # return node",
"def exercise_b2_69():\r\n pass",
"def rec_search_backway(route, prv):\n if not prv[route]:\n return [[route]]\n else:\n s = []\n for r in prv[route]:\n b = rec_search_backway(r, prv)\n #print b \n for t in b:\n t.append(route)\n s.extend(b)\n return s",
"def _for_process_and_descendants(function, proc):\n return (function(proc) +\n sum(function(child)\n for child in proc.get_children(recursive=True)))",
"def reroot(*args, **kwargs)->None:\n pass",
"def _add_recurse(self, path_name, dot_name):\n # self.logger.debug(\n # \"_add_recurse({0},{1})\".format(path_name, dot_name))\n\n added_count = 0\n if os.path.isdir(path_name):\n # then is module, such as xmlrpc, with includes:\n # network/tcp_echo/xmlrpc/__init__.py\n # network/tcp_echo/xmlrpc/client.py\n # network/tcp_echo/xmlrpc/server.py\n self.logger.debug(\"Recurse into directory ({})\".format(path_name))\n\n dir_list = os.listdir(path_name)\n for name in dir_list:\n if name == \"__pycache__\":\n self.logger.debug(\n \" skip known skipper ({})\".format(name))\n continue\n\n if name == \"test\":\n self.logger.debug(\n \" skip known skipper ({})\".format(name))\n continue\n\n if name[0] == \".\":\n self.logger.debug(\n \" skip pattern skipper ({})\".format(name))\n continue\n\n # still here, see if file or subdirectory\n file_name = os.path.join(path_name, name)\n if os.path.isdir(file_name):\n # then another sub-directory\n added_count += self._add_recurse(\n file_name, dot_name + '.' + name)\n\n else: # assume is a file?\n # for example, name=client.py\n if name.endswith(\".py\"):\n self.dep_list.append(file_name)\n added_count += 1\n try:\n self.logger.debug(\n \"Recurse into s-file ({})\".format(file_name))\n self.add_file_dependency(file_name)\n\n except FileNotFoundError:\n self.logger.error(\n \"Could NOT find above dependency within\" +\n \"({})\".format(file_name))\n # sys.exit(EXIT_CODE_MISSING_DEP)\n\n else:\n # expects network.tcp_echo.xmlrpc.something.txt\n value = path_name + os.sep + name\n self.logger.debug(\n \"Add file as dependency({})\".format(value))\n self.dep_list.append(value)\n added_count += 1\n\n else:\n # might be file, like network/tcp_echo/ftplib.py as\n # network.tcp_echo.ftplib\n if not path_name.endswith(\".py\"):\n path_name += \".py\"\n self.logger.debug(\"Recurse into d-file ({})\".format(path_name))\n self.add_file_dependency(path_name)\n\n return added_count",
"def getHierarchy(unique_name):",
"def getHierarchy(unique_name):",
"def getHierarchy(unique_name):",
"def question4(T,r,n1,n2):\n\n\tif(len(T)<=1):\t\t\t\t\t\t\t\t# Edge case : If the Tree only consists of a root and no children\n\t\treturn -1\n\n\tif(n1==None or n2==None):\t\t\t\t\t# Edge case : If n1 and n2 are not actually numbers\n\t\treturn -1\n\n\tlen_T = len(T)\n\tif(not n1 < len_T or not n2 < len_T):\t\t# Edge case : If the nodes gives in parameters do not actually exist in the tree\n\t\treturn -1\n\n\tn1_list = []\t\t\t\t\t\t\n\tn2_list = []\n\n\tfor i in range(len(T)):\t\t\t\t\t\t# Traverse the list and append all the parents of node1 if found in O(N)\n\t\tif T[i][n1]==1:\n\t\t\tn1_list.append(i)\n\n\tfor i in range(len(T)):\t\t\t\t\t\t# Traverse the list and append all the parents of node2 is found in O(N)\n\t\tif T[i][n2]:\n\t\t\tn2_list.append(i)\n\n\t\t\t\t\t\t\t\t\t\t\t\t# The root is a common ancestor of every node in the tree\n\tif not r in n1_list:\t\t\t\t\t\t# check if the root is in the list, if not, add it\n\t\tn1_list.append(r)\n\n\tif not r in n2_list:\t\t\t\t\t\t# check if the root is in the list, if not, add it\n\t\tn2_list.append(r)\n\n\tn1_list = reversed(n1_list)\t\t\t\t\t# Since we are operating on a binary tree, we sort\n\tfor i in n1_list:\t\t\t\t\t\t\t# in decending order to operate on the latest nodes\n\t\tif i in n2_list:\t\t\t\t\t\t# if a match is found, we know that it is the lowest common ancestor\n\t\t\treturn i \t\t\t\t\t\t\t# If nothing is found, the root node is bound to be returned. And it correct.",
"def part1(input_lines):\n # This is a DAG problem. We need to form a dependency graph.\n tower = get_tower(input_lines)\n return find_root(tower)",
"def primenumbers(number, recurv, templist):\n for j in recurv:\n templist = [i for i in templist if i % j != 0]\n\n for x in templist:\n if not recurv:\n if number % x == 0:\n recurv.append(x)\n primenumbers(number, recurv, templist)\n if number % x == 0 and x > recurv[-1]:\n recurv.append(x)\n '''while tempnumber != 1 and tempnumber in recurv:\n if tempnumber % x == 0:\n tempnumber = number / x\n recurv.append(x)'''\n answer = functools.reduce(lambda x, y: x * y, recurv)\n print('answer:')\n print(answer)\n print(number)\n if answer == number:\n print('worked')\n break\n else:\n print('hi')\n primenumbers(number, recurv, templist)\n return recurv",
"def partial_tree(s, n):\n if n == 1:\n return (Tree(s.first), s.rest)\n elif n == 2:\n return (Tree(s.first, [Tree(s.rest.first)]), s.rest.rest)\n else:\n left_size = (n-1)//2\n right_size = n - left_size - 1\n \"*** YOUR CODE HERE ***\"",
"def dirtree(dir, index):\n filenames = os.listdir(dir)\n for filename in filenames:\n if not os.path.isdir(os.path.abspath(dir+'/'+filename)):\n if filename == filenames[-1]:\n print('| '*index+'\\--', filename)\n else:\n print('| '*index+'|--', filename)\n else:\n print('| '*index+'|--', filename)\n dir = dir + '/' + filename\n dirtree(dir, index+1)",
"def recursive_unpack(self):\n\n def _genflatten(lst):\n if not lst:\n return []\n ##\n if isinstance(lst[0], Assembly):\n lst = lst[0].unpack()\n ##\n for elem in lst:\n if isinstance(elem, Assembly):\n apos = elem.GetPosition()\n asum = np.sum(apos)\n for x in elem.unpack():\n if asum:\n yield x.clone().shift(apos)\n else:\n yield x\n else:\n yield elem\n\n return list(_genflatten([self]))",
"def exercise_b2_106():\r\n pass",
"def test_augassign_recursion():\n # infinitely recurses in python\n code = \"\"\"\n def rec():\n a = 0\n a += rec()\n return a\n rec()\n \"\"\"\n cls_node = extract_node(code)\n assert next(cls_node.infer()) is util.Uninferable",
"def _find_one_tree(tree: dict,\n func: Callable,\n args: Tuple,\n kwargs: Mapping,\n ) -> Union[dict, None]:\n frontier = []\n explored = set()\n for uid, item in tree.items():\n frontier.append((uid, item))\n while frontier:\n uid, item = frontier.pop()\n explored.add(uid)\n if func(item, *args, **kwargs):\n return item\n if \"children\" in item:\n for child_uid, child_item in item[\"children\"].items():\n if child_uid not in explored:\n frontier.append((child_uid, child_item))",
"def test_path1():\n path = [(0,0,1)]\n path.append([('A',4,0)])\n path.append((0,1,1))\n path.append([('A',3,0)])\n path.append((0,1,1))\n path.append([('A',2,0)])\n path.append((0,1,1))\n path.append([('A',1,0)])\n\n execute_path(path, True)",
"def zig_zag_traversal(root):\n return",
"def checkSumWalk(top=\".\", func=checkSumHelper):\n values = []\n os.path.walk( top, checkSumHelper, values )\n return sum(values)",
"def __call__(self, node):\n if not node.children: return;\n if len(node.children) <= 2: return;\n if self.IsGoodTriple(node.children): return;\n if len(node.children) >= 8: raise ValueError(\"Too long to decompose\");\n children = map(lambda x : [self.GetLabel(x)], node.children);\n #print \"Guessing %s\" % children;\n print node.ToPrettyString();\n res = self.path_finder.FindPath(children, self.GetLabel(node));\n if len(res) != 0:\n print res[0];\n tnodes, count = self.Transform(res[0][1], node, 0);\n node.children = tnodes.children;\n else:\n raise ValueError(\"Find no production chains to decompose for %s\" % children);\n print node.ToPrettyString();",
"def getrecursionlimit(): # real signature unknown; restored from __doc__\n pass",
"def traverse_directory(args) :\n siteRGX = re.compile('DPH.'+args.site.upper())\n s = []\n\n # report non-unique residuals\n for root, dirs, files in os.walk(args.traverse):\n path = root.split('/')\n for gamitFile in files:\n if siteRGX.search(gamitFile):\n gamitFile = root+'/'+gamitFile\n #check for potential duplicates in the same path, only want to use one of the DOH files\n if len(path[-1]) > 4:\n regex = re.compile(root[:-2])\n else:\n regex = re.compile(root)\n\n\n # only check for duplicates when there is more than one network\n # being processed...\n if args.network == 'yyyy_dddnN':\n if len(s) == 0:\n s.append(gamitFile)\n else:\n # for each element in s, check to see if the root path does not match\n # any of the files already stored in the list\n m = 0\n for item in s:\n if regex.search(item) :\n m = 1\n if not m :\n s.append(gamitFile)\n else:\n s.append(gamitFile)\n\n s.sort()\n lines = ''\n # Now loop through each file and consolidate the residuals\n for dfile in s :\n dphs = res.parseDPH(dfile)\n\n # check if the dph files are being searched are from\n #a GAMIT network of type yyyy/dddn?/\n root, filename = os.path.split(dfile)\n if args.network == 'yyyy_dddnN':\n ddd = root[-5:-2]\n year = int(root[-10:-6])\n startDT = dt.datetime(year,01,01)\n startDT = startDT + dt.timedelta(days=(int(ddd) -1))\n elif args.network == 'ddd':\n ddd = root[-3:]\n year = root[-8:-4] \n startDT = dt.datetime(int(year),01,01)\n startDT = startDT + dt.timedelta(days=(int(ddd) -1))\n\n line = res.consolidate(dphs,startDT)\n lines = lines + line\n\n # if its larger than 1GB dump it to a file\n # this is designed to keep the load n the file system lighter\n if sys.getsizeof(lines) > 1073741824 :\n f = gzip.open(args.save_file,'a',9)\n f.write(lines)\n f.close()\n lines = ''\n #print(lines)\n\n # dump any remaining memory to file\n f = gzip.open(args.save_file,'a',9)\n f.write(lines)\n f.close()\n lines = ''\n\n return",
"def run_recursion(bags, data, rules, total):\n bag_added = False\n for line in data:\n for item in line[1]:\n for bag in bags:\n if bag == item[0] and line[0] not in bags:\n bags.append(line[0])\n bag_added = True\n # # Part 2\n # for rule in rules:\n # if bag == rule[0]:\n # # print(item)\n # total += int(item[1]) * int(rule[1]) # TOO HIGH\n # Try to reverse the recursion. Instead of how many bags can fit a BAG_TYPE in them,\n # how many bags can fit inside a BAG_TYPE\n # print(set(bags), len(set(bags)))\n if BAG_TYPE in bags:\n bags.remove(BAG_TYPE) # We remove the bag in question because it cannot contain itself\n if bag_added is True:\n run_recursion(bags, data, rules, total)\n return bags, len(set(bags))",
"def rameaux(p):\r\n return list(set([Father(x) for x in uc1_leafy(p) if Father(x) != None]))",
"def dfs(node: TreeNode):\n if not node:\n return\n helper(node, 0, sum)\n dfs(node.left)\n dfs(node.right)",
"def postorder_recursive(self, start, path):\n if start:\n path = self.postorder_recursive(start.left_child, path)\n path = self.postorder_recursive(start.right_child, path)\n path += (str(start.value) + \"--\")\n return path",
"def dfs_recursive(self, starting_vertex, destination_vertex):\n pass # TODO",
"def dfs_recursive(self, starting_vertex, destination_vertex):\n pass # TODO",
"def walk(tree):\n results = []\n for parent, children in tree.iteritems():\n perms = permute(children)\n temp = deepcopy(perms)\n # Add the parent to the perms as well\n for perm in temp:\n perms.append(add_to_head_of_perm(parent, perm))\n for perm in perms:\n r = pairwise(perm)\n results.append(r)\n return results",
"def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n fringeList = util.Stack()\n print \"fringeList\",fringeList\n closedList = {str(problem.getStartState()): ([])} #Hash Map to maintain state to path\n print \"closed list:\", closedList\n isGoalStateArrived = False\n\n # Push start state into fringeList\n fringeList.push((problem.getStartState()))\n\n while not isGoalStateArrived and not fringeList.isEmpty():\n currentNode = fringeList.pop()\n print \"currentNode\",currentNode\n currentNodePath = closedList[str(currentNode)]\n print \"currentNodepath:\",currentNodePath\n # Explore children\n childrenOfCurrentNode = problem.getSuccessors(currentNode)\n print \"childrenOfCurrentNode:\",childrenOfCurrentNode\n for childNode in childrenOfCurrentNode:\n if str(childNode[0]) not in closedList:\n path = copy.copy(currentNodePath)\n path.append(childNode[1])\n print \"child [0] %s, child [1] %s\", childNode[0],childNode[1]\n print \"path \", path\n fringeList.push(childNode[0])\n closedList[str(childNode[0])] = path # Put parent node in closed List\n if problem.isGoalState(childNode[0]):\n isGoalStateArrived = True\n goalState = childNode[0]\n break\n\n if isGoalStateArrived:\n #print closedList[str(problem.getStartState())]\n return closedList[str(goalState)]\n \"util.raiseNotDefined()\"",
"def run_standard(items):\n global counter\n counter = 0\n print\n print \"Standard recursive version\"\n print \"Position with\", items, \"items is\", evaluate_position(items)\n print \"Evaluated in\", counter, \"calls\"",
"def fn(i):\n if len(stack) == k: return ans.append(stack.copy())\n for ii in range(i+1, n+1): \n stack.append(ii)\n fn(ii)\n stack.pop()",
"def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n \n from game import Directions\n visited = set() # unique elements\n state = problem.getStartState()\n #returns starting agent's position\n waiting_list = util.Stack()\n # LIFO\n # last in first out\n # parents = collections.defaultdict(collections.UserDict)\n parents = {}\n #dictionary\n sequence = []\n #LIFO\n for action in problem.getSuccessors(state):\n # in order to push full-state values\n waiting_list.push(action)\n # enumarating tuple\n\n while not waiting_list.isEmpty():\n state = waiting_list.pop()\n \n visited.add(state[0])\n # node is visited and we wont visit those nodes\n \n for substate in problem.getSuccessors(state[0]):\n # take a look to successors of current node\n \n if substate[0] not in visited:\n # if not in visited \n # saving parents\n parents[substate[0]]={'parent':state} \n # generate new node\n waiting_list.push(substate)\n # push to stack\n if problem.isGoalState(substate[0]): \n target_state = substate \n #finding wayback\n\n\n while target_state[0] in parents.keys():\n temp=parents[target_state[0]]['parent']\n sequence.append(target_state[1])\n target_state = temp\n sequence.append(target_state[1])\n return sequence[::-1]",
"def transitive_reduction_helper(self, node, visited, path):\n\n visited.add(node)\n path.append(node)\n \n for i in self.suffix[node].copy(): \n self.transitive_reduction_helper(i, visited, path)\n\n # determine if the prefix can be reduced\n for i in self.prefix[path[-1]].copy():\n # if j is in the path and not the previous node\n if i in path and i is not path[-2]:\n self.remove_edge(i, path[-1]) \n \n \n path.remove(node)\n return visited",
"def _xmodule_recurse(item, action, ignore_exception=()):\r\n for child in item.get_children():\r\n _xmodule_recurse(child, action, ignore_exception)\r\n\r\n try:\r\n return action(item)\r\n except ignore_exception:\r\n return"
] | [
"0.64885056",
"0.6223022",
"0.60090613",
"0.5734339",
"0.55864066",
"0.55643034",
"0.5554218",
"0.55151844",
"0.55021846",
"0.5453173",
"0.54176205",
"0.5387818",
"0.538237",
"0.536489",
"0.53377",
"0.5313174",
"0.5310064",
"0.5292833",
"0.5277829",
"0.5275689",
"0.52746475",
"0.5240773",
"0.52360195",
"0.52348673",
"0.5228656",
"0.5228011",
"0.5224013",
"0.5207873",
"0.52013063",
"0.52002347",
"0.51893336",
"0.5188486",
"0.5183741",
"0.51810026",
"0.51726395",
"0.51719797",
"0.51656294",
"0.5155811",
"0.51282424",
"0.5120558",
"0.51140124",
"0.510847",
"0.5098898",
"0.5096708",
"0.50835747",
"0.5078588",
"0.5078171",
"0.5066181",
"0.5063658",
"0.505624",
"0.5052928",
"0.5050475",
"0.50504",
"0.504715",
"0.50469536",
"0.5045855",
"0.5043991",
"0.50391346",
"0.50390524",
"0.5027638",
"0.50242007",
"0.5023364",
"0.50229377",
"0.5013864",
"0.50091296",
"0.5008809",
"0.49968845",
"0.49944818",
"0.4974667",
"0.49728313",
"0.497232",
"0.497232",
"0.497232",
"0.4963423",
"0.49606222",
"0.49596238",
"0.49573842",
"0.4955572",
"0.49494314",
"0.49488142",
"0.4946",
"0.49435776",
"0.49390277",
"0.49387988",
"0.49376115",
"0.4937089",
"0.49363336",
"0.4934303",
"0.49311155",
"0.4930348",
"0.4928943",
"0.49227932",
"0.49200842",
"0.49200842",
"0.49189684",
"0.4918789",
"0.49171895",
"0.49167562",
"0.49162185",
"0.4912877",
"0.49107522"
] | 0.0 | -1 |
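Note: the query_recursive wrapper in the row above delegates to a private _query_recursive helper whose body is not included in this dump. A minimal sketch of that helper, assuming the half-open-interval and op/e()/dat conventions visible in the surrounding rows (the body itself is an assumption, not the original implementation):

def _query_recursive(self, p, q, k, l, r):
    # node k covers the half-open interval [l, r); the query range is [p, q)
    if q <= l or r <= p:
        # no overlap: contribute the identity element
        return self.e()
    if p <= l and r <= q:
        # node interval lies fully inside the query range
        return self.dat[k]
    m = (l + r) // 2
    vl = self._query_recursive(p, q, k * 2 + 1, l, m)
    vr = self._query_recursive(p, q, k * 2 + 2, m, r)
    return self.op(vl, vr)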
Returns the number at the nth position in the Fibonacci sequence | def fib(n):
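    # 1-indexed: fib(1) == 0, fib(2) == 1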
    fib = [0, 1]
    if n > 2:
        for _ in range(n - 2):
            fib.append(fib[-1] + fib[-2])
    return fib[n-1] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_fib(position):\n\n # Base Case: Positions greater thatn 0 or 1, since Fibonacci for 0 is 0 and\n # 1 is 1.\n if position == 0 or position == 1:\n return position\n\n return get_fib(position - 1) + get_fib(position - 2)",
"def fast_fibonacci(n):\n return _fast_fibonacci(n)[0]",
"def fibonacci(n):",
"def next_fib(f):\n for f in fib:\n i = fib.index(f)\n return f+fib[i-1]",
"def nthFibonacci(n):\n\n # Run some basic error checking\n try:\n n = int(n)\n except: # if this fails not a number inputed\n sys.stderr.write('Incorrect data input\\n')\n return None\n if n < 0:\n sys.stderr.write('Only positive integers allowed\\n')\n return None\n \n # since the error checking slows down the recursion we run it as a seperate function\n [Fnm,Fn] = fastrecursivefibonacci(n)\n return Fnm",
"def fibonacci(n):\n if n == 0:\n return 0\n elif n == 1:\n return 1\n else:\n nth = fibonacci(n-1) + fibonacci(n-2)\n return nth",
"def fibi(n):\n a, b = 0, 1\n for i in range(n):\n # fibonacci series is next no. is sum of previous two number.\n temp = a\n a = b\n # now nth fibonacci no. is sum of previous two number.\n b = temp+b\n # returning a because a changing each places\n return a",
"def fibonacci(n):\n\tfib_seq = []\n\tnth_term = 0\n\t\n\tfor i in range(0,n+1):\n\t\tif i == 0:\n\t\t\tfib_seq.append(0)\n\t\tif i == 1:\n\t\t\tfib_seq.append(1)\n\t\tif i > 1:\n\t\t\tnth_term = fib_seq[-1] + fib_seq[-2]\n\t\t\tfib_seq.append(nth_term)\n\t\n\tprint(fib_seq)\n\tprint(fib_seq[n])\n\treturn(fib_seq[n])",
"def fibonacci_iterative(nth_nmb: int) -> int:\n old, new = 0, 1\n if nth_nmb in (0, 1):\n return nth_nmb\n for __ in range(nth_nmb - 1):\n old, new = new, old + new\n return new",
"def find_fib(n):\n # fibo = 2.078087 * math.log(n) + 1.672276\n return 0 # fibo",
"def fib(n): #Describe \"n\" as a variable in fib sequence\n while n == 0:\n return 0 #establish that 0 position is equal to 0\n if n == 1:\n return 1\n else:\n return fib(n-1) + fib(n-2)",
"def fibonacci(n):\r\n\r\n if n in past_fib:\r\n return past_fib[n]\r\n \r\n if n == 0 or n == 1:\r\n past_fib[n] = 1\r\n return 1\r\n\r\n total = fibonacci(n-1) + fibonacci(n-2)\r\n past_fib[n] = total\r\n return total",
"def fibonacci_number(n):\r\n l = [0, 1] \r\n for i in range(n - 1):\r\n l = [*l, l[-1] + l[-2]]\r\n return l[n - 1]",
"def fib(index):\n return round((GR**index)/R5)",
"def fib(n:int) -> int:\n if n<= 2:\n return 1\n else:\n return fibonacci.fib(n-1) + fibonacci.fib(n-2)",
"def fib(n):\n if n in (0, 1): return n\n return fib(n-1) + fib(n-2)",
"def fibi(n):\n if n == 0: return 0\n if n == 1: return 1\n f_n2, f_n1 = 1, 1\n for i in range(3, n+1):\n f_n2, f_n1 = f_n1, f_n2+f_n1\n return f_n1",
"def fibonacci(n):\n sequence = [0, 1]\n for i in range(n + 1):\n value = add(sequence[-2], sequence[-1])\n sequence.append(value)\n return sequence[n]",
"def fibonacci(n):\n if n in (0, 1):\n return n\n return fibonacci(n - 2) + fibonacci(n - 1)",
"def fibi(n: int) -> int:\n if n == 0:\n return 0\n if n == 1:\n return 1\n f_n2, f_n1 = 1, 1\n for _ in range(3, n+1):\n f_n2, f_n1 = f_n1, f_n2+f_n1\n return f_n1",
"def fib(n):\n if n < 2:\n return n\n else:\n return fib(n-1) + fib(n-2)",
"def fibonacci(n):\n if n in (0, 1):\n return n\n return (fibonacci(n-2) + fibonacci(n-1))",
"def fib(n):\n if n == 0 or n == 1:\n return n\n else:\n return fib(n-2) + fib(n-1)",
"def fibonacci(n):\n if n in (0,1):\n return n\n\n return (fibonacci(n-2) + fibonacci(n-1))",
"def fib(n: int) -> int:\n if n == 0: return 0\n if n == 1: return 1\n return fib(n-1) + fib(n-2)",
"def fib(n):\n n = int(n)\n if n <= 1:\n return 1\n\n return fib(n-1) + fib(n-2)",
"def fibonacci(n):\n\n if n == 0:\n return 0\n if n == 1:\n return 1\n \n previous = 0\n current = 1\n\n for i in range(n-1):\n previous, current = current, current + previous\n\n return current",
"def fib(i):\n if i < 2: return 1\n return fib(i-1) + fib(i-2)",
"def fibonacci(n):\n if n < 2:\n return n\n return fibonacci(n - 1) + fibonacci(n - 2)",
"def fibonacci(n):\n if n < 2:\n return n\n return fibonacci(n-1) + fibonacci(n-2)",
"def fibonacci(n):\n if n < 2:\n return n\n return fibonacci(n-1) + fibonacci(n-2)",
"def fibonacciN(n):\n previous, current = 1, 1\n k = 2\n while k < n:\n previous, current = current, previous + current\n k += 1\n return current",
"def fibo_element(n):\n f = ()\n if n < 0:\n print(\"Incorrect number\")\n elif n == 0:\n return 0\n elif n == 1:\n return 1\n else:\n return fibo_element(n-1) + fibo_element(n-2)",
"def fib(n):\n print(\"fib({})\".format(n))\n if(n <= 2):\n return 1\n else:\n return fib(n-1) + fib(n-2)",
"def fib(n):\n if n == 1:\n return 1\n else:\n return n + fib(n-1)",
"def fib(n):\n i = 0\n j = 1\n n = n - 1\n\n while n >= 0:\n i, j = j, i + j\n n = n - 1\n return i",
"def fib (n):\r\n if n == 0 or n == 1:\r\n return 1\r\n else:\r\n return fib(n-1) + fib(n-2)",
"def fib(n):\n if n == 0: return 0\n if n == 1: return 1\n return fib(n-1) + fib(n-2)",
"def func(n):\n if n not in fib:\n fib[n] = func(n-1) + func(n-2)\n return fib[n]",
"def fibonacci_term(n):\n return int(((1+sqrt(5))**n-(1-sqrt(5))**n)/(2**n*sqrt(5)))",
"def fib(f1, index):\n if index == 0:\n return 0\n if index == 1:\n return f1\n if (f1, index) not in FIBS_CACHE:\n FIBS_CACHE[f1, index] = fib(f1, index-1) + fib(f1, index-2)\n return FIBS_CACHE[f1, index]",
"def fibonacci(n):\n fibval = sum_series(n, 0, 1)\n print(fibval)\n return fibval",
"def fib(n):\n if n == 0:\n return 0\n elif n == 1:\n return 1\n else:\n return fib(n - 2) + fib(n - 1)",
"def fib(n: int) -> int:\n if n == 0:\n return 0\n if n == 1:\n return 1\n return fib(n-1) + fib(n-2)",
"def fibonacci(n):\n if n <= 1:\n return n \n else:\n return fibonacci(n-1) + fibonacci(n-2)",
"def fib3(n):\n if n < 2:\n return n\n return fib1(n-1) + fib1(n-2)",
"def fib(n):\n if n == 1 or n == 2:\n result = 1\n else:\n result = fib(n-1) + fib(n-2)\n return result",
"def fibonacci (n):\n\tif n == 0:\n\t\treturn 0\n\telif n == 1:\n\t\treturn 1\n\telse:\n\t\treturn fibonacci(n-2) + fibonacci(n-1)",
"def fibonacci1(n):\n if n in (0, 1):\n return n\n return fibonacci1(n - 2) + fibonacci1(n - 1)",
"def fib(n): \n if n == 0:\n return 0\n elif n == 1:\n return 1\n\n else:\n return fib(n-1) + fib(n-2)",
"def fib(n):\n if n == 0:\n return 0\n elif n == 1:\n return 1\n else:\n return fib(n - 1) + fib(n - 2)",
"def fib(number: int) -> int:\n return next(islice(generator(number), number, number + 1))",
"def fib(n):\n i = 0\n j = 1\n n = n - 1\n\n while n >= 0:\n i, j = j, i + j\n n = n - 1\n \n return i",
"def fib(n):\n i = 0\n j = 1\n n = n - 1\n\n while n >= 0:\n i, j = j, i + j\n n = n - 1\n \n return i",
"def fib(n):\n i = 0\n j = 1\n n = n - 1\n\n while n >= 0:\n i, j = j, i + j\n n = n - 1\n \n return i",
"def fib(n): # this line defines the function 'fib' where n is the input value\n i = 0\n j = 1\n n = n - 1\n\n while n >= 0:\n i, j = j, i + j\n n = n - 1\n \n return i",
"def fibonacci(n):\n print(n)\n if n == 0 or n == 1:\n return 1\n\n return fibonacci(n - 1) + fibonacci(n - 2)",
"def fibonacci(n):\n\tif n == 0:\n\t\treturn 0\n\telif n == 1:\n\t\treturn 1\n\telse:\n\t\treturn fibonacci(n-1) + fibonacci(n-2)",
"def fibonacci(n):\n if n == 1:\n return 0\n elif n == 2:\n return 1\n else:\n return fibonacci(n-2) + fibonacci(n-1)",
"def fib(n):\n if n==1 or n==2:\n return 1\n else:\n return fib(n-1)+fib(n-2)",
"def fibo(n):\r\n if n==1:\r\n return 0\r\n elif n==2:\r\n return 1\r\n else:\r\n return fibo(n-1)+fibo(n-2)",
"def fibonacci(n):\n\n if n <= 1:\n return n\n else:\n return (fibonacci(n-1) + fibonacci(n-2))",
"def last_fib_digit(n):\n\n # global seq\n seq = []\n seq.append(1)\n seq.append(1)\n\n if n <= 2:\n return(1)\n\n for i in range(2, n):\n seq.append(last_digit(seq[i-1] + seq[i-2]))\n\n return seq[n-1]",
"def fib_formula(n):\n if n <= 1:\n return n\n else:\n return (fib_formula(n - 1) + fib_formula(n - 2))",
"def fibonacci(n):\n\n if (n == 0):\n return 0\n elif (n == 1):\n return 1\n else:\n return fibonacci(n - 1) + fibonacci(n - 2)",
"def fibonacci(n):\n if n == 0:\n return 0\n elif n == 1:\n return 1\n else:\n return fibonacci(n - 1) + fibonacci(n - 2)",
"def fib(k):\n if k == 1:\n return 0\n previous, current = 0, 1 # current is the second Fibonacci number.\n for _ in range(k-2):\n previous, current = current, previous + current\n return current",
"def get_fibonacci_last_digit_fast(n):\n fibonacci = [0 for i in range(n + 1)]\n fibonacci[1] = 1\n\n for i in range(2, n + 1):\n fibonacci[i] = (fibonacci[i - 1] + fibonacci[i - 2]) % 10\n\n return fibonacci[n]",
"def fibonacci_n(n):\n sqrt5 = math.sqrt(5)\n phi = (1 + sqrt5) / 2\n psi = (1 - sqrt5) / 2\n return (phi**n - psi**n) // sqrt5",
"def fib_iterative(n: int) -> int:\n if n < 0:\n raise ValueError\n number1 = 0\n number2 = 1\n counter = 1\n while counter < n:\n counter += 1\n number1, number2 = number2, number1 + number2\n return number2",
"def fibonacci(n):\n\n if n == 1:\n v = 0\n elif n == 2:\n v = 1\n else:\n v = fibonacci(n - 2) + fibonacci(n - 1)\n\n return v",
"def fib(n):\n a, b = 1, 1\n while n:\n a, b = b, a + b\n n -= 1\n return a",
"def fibonacci_recursive(nth_nmb: int) -> int:\n def fib(_n):\n return _n if _n <= 1 else fib(_n - 1) + fib(_n - 2)\n return fib(nth_nmb)",
"def fibonacci(number: int) -> int:\n fibs = [0] * (number + 2)\n fibs[0] = 0\n fibs[1] = 1\n for i in range(2, number + 1):\n fibs[i] = fibs[i - 1] + fibs[i - 2]\n return fibs[number]",
"def fibonacci(self, n):\n\n if n == 1:\n return 1\n elif n <= 0:\n return 0\n else:\n return self.fibonacci(n - 1) + self.fibonacci(n - 2)",
"def fibonacci(n):\n if n == 0:\n return 0\n elif n == 1:\n return 1\n else:\n return (fibonacci(n-1) + fibonacci(n-2))",
"def fib(N):\n sqrt5 = math.sqrt(5)\n phi = (sqrt5 + 1) / 2\n return int(round(math.pow(phi, N) / sqrt5))",
"def fib_efficient(number: int) -> int:\n if number >= 0:\n return calculate(number)[0]\n return -calculate(-number)[0] if not number % 2 else calculate(-number)[0]",
"def fib_iterative(n: int) -> int:\n print(n)\n return 0",
"def fibonacci(n):\n\n if n == 1:\n return 1\n elif n < 1:\n return 0\n else:\n return fibonacci(n-1) + fibonacci(n-2)",
"def fibonacci(n):\n if n==0 :\n return 0\n elif n==1:\n return 1\n else:\n return fibonacci(n-1) + fibonacci(n-2)",
"def fibonacci0(n):\n assert n == int(n) and n > 0\n if n in [1, 2]:\n return 1\n return fibonacci0(n-1) + fibonacci0(n-2)",
"def fibonacci(n: int) -> int:\n m = 1 << (n.bit_length() - 1)\n Fn = 0\n Fnm1 = 1\n while m:\n Fn2 = Fn * Fn\n Fn = 2 * Fnm1 * Fn + Fn2\n Fnm1 = Fnm1 * Fnm1 + Fn2\n if n & m:\n Fnm1, Fn = Fn, Fnm1 + Fn\n m >>= 1\n return Fn",
"def fibonacci_iter(n):\n f = []\n for x in range(n + 1):\n if x == 0:\n f.append(x)\n elif x == 1:\n f.append(x)\n else:\n f.append(f[-1] + f[-2])\n return f[-1]",
"def fib1(n):\n if n < 2:\n return n\n return fib1(n-1) + fib1(n-2)",
"def fibonacci(n):\n assert n >= 0 and int(\n n) == n, 'n has to be greater than or equal to 0 and has to be an integer'\n if n in [0, 1]:\n return n\n else:\n return fibonacci(n - 1) + fibonacci(n - 2)",
"def fibonacci(n):\n if n < 0:\n raise ValueError(\"n cannot be negative\")\n elif n < 2:\n return n\n else:\n a, b = 0, 1\n for _ in range(n):\n a, b = b, add(a, b)\n return a",
"def fibonacci0(n):\n assert n == int(n) and n > 0\n if n in [1, 2]:\n return 1\n return fibonacci0(n-1) + fibonacci0(n-2)",
"def optimized_fibonacci(f):\n a = 0\n b = 1\n if f < 2:\n return f\n else:\n for i in range(1, f):\n c = a + b\n a = b\n b = c\n return b",
"def fibonacci(n):\n k, m = 1, 1\n\n if n < 2:\n return n\n\n for i in range(2, n):\n k, m = m, k + m\n\n return m",
"def get_fibonacci_number(self, n) -> int:\n\n if not isinstance(n, int) or n < 0:\n raise ValueError(\"n must be a non-negative integer\")\n\n if n in self.known_cache:\n return self.known_cache[n]\n\n # Without caching known Fibonacci numbers like this, this function\n # will generate a \"maximum recursion depth exceeded\" error\n # (when for sufficiently large Fibonacci numbers).\n # That's because Python doesn't do tail recursion elimination.\n self.known_cache[n] = self.get_fibonacci_number(\n n - 1) + self.get_fibonacci_number(n - 2)\n\n return self.known_cache[n]",
"def fibonacci():\n return sum_series(a=0, b=1)",
"def fibonacci(num):\n if num == 0 or num ==1:\n return 1\n return fibonacci(num-1) + fibonacci (num-2)",
"def fib_cached(i):\n if i < 2: return 1\n return fib_cached(i-1) + fib_cached(i-2)",
"def fib(N):\n fibs = [0, 1]\n \n for i in range(2, N + 1):\n fibs.append(fibs[i-1]+fibs[i-2])\n \n return fibs[N]",
"def fibonacci(a):\n fib = [1,1]\n x = 0\n i = 1\n while x < a:\n x = fib [i] + fib[i-1]\n i += 1\n fib.append(x)\n return i, fib",
"def fibclassic (n):\n\n return 1 if n < 3 else fibclassic (n-2) + fibclassic (n-1)",
"def fibonacci(n):\n\n ## Auxiliary functions for working in our polynomial ring.\n def poly_sqr((a, b)):\n a2 = a*a\n return 2*a*b + a2, a2 + b*b\n def poly_mul((a, b), (c, d)):\n ac = a*c\n return a*d + b*c + ac, ac + b*d\n\n ## Do the job. For negative indices, we take powers of t^{-1}.\n if n < 0: return power((1, -1), -n, (0, 1), poly_sqr, poly_mul)\n else: return power((1, 0), n, (0, 1), poly_sqr, poly_mul)",
"def fib(n):\n if n < 0:\n raise ValueError\n\n if n > 1:\n return fib(n - 1) + fib(n - 2)\n\n return n",
"def _fast_fibonacci(n):\n if n == 0:\n return (0, 1)\n else:\n a, b = _fast_fibonacci(n // 2)\n c = a * (b * 2 - a)\n d = a**2 + b**2\n if n % 2 == 1:\n return (d, c + d)\n else:\n return (c, d)"
] | [
"0.7997541",
"0.79853135",
"0.7872735",
"0.78044224",
"0.7801214",
"0.77594495",
"0.77388746",
"0.7687958",
"0.76830304",
"0.7671803",
"0.7670554",
"0.7667316",
"0.76668704",
"0.76389676",
"0.7613944",
"0.7574987",
"0.7570288",
"0.7553658",
"0.75406444",
"0.75240415",
"0.7495268",
"0.74817663",
"0.7472793",
"0.74636066",
"0.7462618",
"0.74594903",
"0.74577117",
"0.7451156",
"0.7443849",
"0.74411345",
"0.74411345",
"0.74403507",
"0.7432474",
"0.7427094",
"0.7423931",
"0.74163985",
"0.74160504",
"0.73998916",
"0.7384285",
"0.73833966",
"0.7383058",
"0.738153",
"0.7380685",
"0.73737407",
"0.7373291",
"0.7358031",
"0.7352766",
"0.7350639",
"0.7339848",
"0.7338277",
"0.7337642",
"0.7336772",
"0.7332001",
"0.7332001",
"0.7332001",
"0.73276067",
"0.73265314",
"0.7317672",
"0.7315097",
"0.7311718",
"0.73109025",
"0.7309287",
"0.73027474",
"0.7285881",
"0.7285514",
"0.728483",
"0.727337",
"0.72644824",
"0.72612846",
"0.72581434",
"0.72557443",
"0.72483873",
"0.7243663",
"0.7241483",
"0.72350144",
"0.7217404",
"0.7214072",
"0.7209021",
"0.72067463",
"0.720536",
"0.72016925",
"0.7197951",
"0.71871513",
"0.7186196",
"0.71811193",
"0.71783274",
"0.71520865",
"0.714938",
"0.7122236",
"0.7116892",
"0.7116187",
"0.71133816",
"0.71128774",
"0.7103699",
"0.7101196",
"0.70904464",
"0.70834714",
"0.7067898",
"0.7065282",
"0.706366"
] | 0.74367195 | 32 |
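Note: a minimal sanity check for the fib() document above, a sketch under the 1-indexed convention its seed list [0, 1] implies (fib(1) == 0, fib(2) == 1); the expected values are not part of the dataset record:

    for n, expected in [(1, 0), (2, 1), (3, 1), (5, 3), (10, 34)]:
        assert fib(n) == expected, (n, fib(n))
    print(fib(10))  # -> 34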
Returns a read-by-read fastQ parser analogous to file.readline(). | def __init__(self, filePath, headerSymbols=['@','+']):
    if filePath.endswith('.gz'):
        self._file = gzip.open(filePath, 'rt')  # text mode so readline() yields str, not bytes
    else:
        self._file = open(filePath, 'r')  # 'rU' was removed in modern Python; universal newlines are the default
    self._currentLineNumber = 0
    self._hdSyms = headerSymbols | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def readfq(fp): # this is a generator function\n last = None # this is a buffer keeping the last unprocessed line\n while True: # mimic closure; is it a bad idea?\n if not last: # the first record or a record following a fastq\n for l in fp: # search for the start of the next record\n if l[0] in '>@': # fasta/q header line\n last = l[:-1] # save this line\n break\n if not last: break\n name, seqs, last = last[1:].partition(\" \")[0], [], None\n for l in fp: # read the sequence\n if l[0] in '@+>':\n last = l[:-1]\n break\n seqs.append(l[:-1])\n if not last or last[0] != '+': # this is a fasta record\n yield name, ''.join(seqs), None # yield a fasta record\n if not last: break\n else: # this is a fastq record\n seq, leng, seqs = ''.join(seqs), 0, []\n for l in fp: # read the quality\n seqs.append(l[:-1])\n leng += len(l) - 1\n if leng >= len(seq): # have read enough quality\n last = None\n yield name, seq, ''.join(seqs); # yield a fastq record\n break\n if last: # reach EOF before reading enough quality\n yield name, seq, None # yield a fasta record instead\n break",
"def _parse_fastq(f):\n header = ''\n seq = ''\n skip = False\n for line in f:\n if skip:\n skip = False\n continue\n line = line.strip()\n if line == '':\n continue\n if line[0] == '@':\n header = line.replace('@', '')\n elif line[0] == '+':\n yield header, seq\n skip = True\n else:\n seq = line.upper()",
"def fastq_reader(fastq):\n group_gen = grouper(fastq, 4)\n for record in group_gen:\n # drop the @ before the name and any text after a whitespace\n name = record[0].split(' ')[0][1:].strip()\n seq = record[1].strip()\n yield name, seq",
"def read_fastq(filename, strip_second_header=True):\n\n with open(filename) as fastq:\n line = fastq.readline()\n if not line.startswith(\"@\"):\n raise IOError(\"Not FASTQ format? First line didn't start with @\")\n while fastq:\n if line.startswith(\"@\"):\n header = line.rstrip()\n seq = fastq.readline().rstrip()\n second_header = fastq.readline()\n if strip_second_header:\n second_header = \"+\"\n scores = fastq.readline().rstrip()\n yield header, seq, second_header, scores\n elif line == \"\": # EOF\n yield header, seq, second_header, scores\n break\n line = fastq.readline()",
"def read(infile):\n if isinstance(infile, str):\n infile = open(infile)\n\n with infile:\n while True:\n cmt = infile.readline().strip()\n seq = infile.readline().strip()\n plus = infile.readline().strip()\n qual = infile.readline().strip()\n\n if not cmt:\n break\n if not cmt.startswith('@') or plus != '+':\n raise ValueError('fastq file <{}> is corrupted'.format(infile.path))\n yield SRecord(cmt=cmt[1:], seq=seq, qual=qual)",
"def parse(line: str) -> \"SeqNode\":\n return MiniParser(Lexer(line)).parse()",
"def readFastq(filename):\n sequences = []\n qualities = []\n \n with open(filename) as fh:\n while True:\n fh.readline() # skip name line\n seq = fh.readline().rstrip() #read base sequence\n fh.readline() # skip placeholder line\n qual = fh.readline().rstrip() # base quality line\n if len(seq) == 0:\n break\n sequences.append(seq)\n qualities.append(qual)\n \n return sequences, qualities",
"def readFastq(filename):\n\tsequences = []\n\tqualities = []\n\twith open(filename, 'r') as f:\n\t\twhile True: \n\t\t\tf.readline() # skip name line\n\t\t\tseq = f.readline().rstrip()\n\t\t\tf.readline() # skip place holder line \n\t\t\tq = f.readline().rstrip()\n\t\t\tif len(seq) ==0:\n\t\t\t\tbreak \n\t\t\tsequences.append(seq)\n\t\t\tqualities.append(q)\n\treturn sequences, qualities",
"def FastqIterator(fh):\n def readTotitle(fh, titleChar):\n \"\"\"returns a tuple ([lines before the next title line], next tile line)\n \"\"\"\n preLines = []\n while True:\n l = fh.readline().strip()\n if l.startswith(titleChar):\n return (preLines,l)\n elif l == '':\n return preLines,None\n else:\n preLines.append(l)\n\n if type(fh) in StringTypes:\n fh = file(fh)\n\n preLines,nextTitleLine =readTotitle(fh,'@')\n\n while nextTitleLine != None:\n seqTitle = nextTitleLine[1:].rstrip()\n preLines,nextTitleLine=readTotitle(fh,'+')\n qualTitle = nextTitleLine[1:].rstrip()\n if len(qualTitle.strip()) > 0 and seqTitle != qualTitle:\n print seqTitle\n print preLines\n print qualTitle\n raise hmmErrors.InvalidFastq, \"Error in parsing: @title sequence entry must be immediately followed by corresponding +title quality entry.\"\n seqLines = preLines\n qualLines = []\n for i in range(len(seqLines)): # Quality characters should be the same length as the sequence\n qualLines.append( fh.readline().strip() )\n\n preLines,nextTitleLine=readTotitle(fh,'@')\n\n yield (seqTitle, ''.join(seqLines), ''.join(qualLines))",
"def parseFasta(fh):\n\n record_seq = []\n record_id = None\n\n for line in fh:\n line = line.strip(\"\\n\")\n\n if line.startswith(\">\"):\n\n if record_seq:\n yield Record(record_id, \"\".join(record_seq))\n\n record_id = line[1:].split()[0]\n record_seq = []\n else:\n record_seq.append(line.replace(\"*\", \"-\"))\n\n if record_seq:\n yield Record(record_id, \"\".join(record_seq))",
"def stream_fastq(fqfile):\n\n if fqfile.endswith('.gz'):\n qin = gzip.open(fqfile, 'rb')\n else:\n qin = open(fqfile, 'r')\n\n while True:\n header = qin.readline()\n if not header:\n break\n header = header.strip()\n seqidparts = header.split(' ')\n seqid = seqidparts[0]\n seq = qin.readline()\n seq = seq.strip()\n qualheader = qin.readline()\n qualscores = qin.readline()\n qualscores = qualscores.strip()\n header = header.replace('@', '', 1)\n yield seqid, header, seq, qualscores",
"def test_fast_reader():\n text = \"a b c\\n1 2 3\\n4 5 6\"\n with pytest.raises(ParameterError): # C reader can't handle regex comment\n ascii.read(text, format=\"fast_basic\", guess=False, comment=\"##\")\n\n # Enable multiprocessing and the fast converter\n try:\n ascii.read(\n text,\n format=\"basic\",\n guess=False,\n fast_reader={\"parallel\": True, \"use_fast_converter\": True},\n )\n except NotImplementedError:\n # Might get this on Windows, try without parallel...\n if os.name == \"nt\":\n ascii.read(\n text,\n format=\"basic\",\n guess=False,\n fast_reader={\"parallel\": False, \"use_fast_converter\": True},\n )\n else:\n raise\n\n # Should raise an error if fast_reader has an invalid key\n with pytest.raises(FastOptionsError):\n ascii.read(text, format=\"fast_basic\", guess=False, fast_reader={\"foo\": True})\n\n # Use the slow reader instead\n ascii.read(text, format=\"basic\", guess=False, comment=\"##\", fast_reader=False)\n # Will try the slow reader afterwards by default\n ascii.read(text, format=\"basic\", guess=False, comment=\"##\")",
"def parse_fastq(filepath):\n if REGEX_GZIPPED.match(filepath):\n logging.debug('Opening \"%s\" as gzipped file', filepath)\n # using os.popen with zcat since it is much faster than gzip.open or gzip.open(io.BufferedReader)\n # http://aripollak.com/pythongzipbenchmarks/\n # assumes Linux os with zcat installed\n import os\n with os.popen('zcat < {}'.format(filepath)) as f:\n yield from _parse_fastq(f)\n else:\n with open(filepath, 'r') as f:\n yield from _parse_fastq(f)",
"def parse_fastq (rec_lines):\n data = []\n data.append(rec_lines[0][1:])\n data.append(rec_lines[1])\n data.append(rec_lines[3])\n return data",
"def fastaNext(fh, buffer):\n id = ''\n documentation = ''\n sequence = ''\n\n # if buffer is empty read a line\n if buffer:\n line = buffer\n buffer = ''\n else:\n line = ''\n for line in fh:\n if line.isspace():\n continue\n else:\n break\n\n # not successful in finding a header line, must be end of file\n if not line:\n return id, documentation, sequence, buffer\n\n # get the ID and documentation from the doc line\n line = line.rstrip()\n try:\n id, documentation = line.split(\" \", maxsplit=1)\n except ValueError:\n # if documentation is missing, split fails\n # print('fastaNext - documentation is missing')\n id = line\n\n id = id.lstrip('> ')\n\n # read the sequence, since the id and documentation are already parsed, it doesn't need to be\n # done here\n for line in fh:\n if line.isspace():\n # skip blank lines\n continue\n\n line = line.rstrip() # remove newline\n # remove N and *\n line = line.replace('N', '')\n line = line.replace('*', '')\n\n if line.startswith('>'):\n # start of next sequence\n buffer = line\n break\n\n else:\n sequence += line\n\n return id, documentation, sequence, buffer\n\n # End of fastaNext",
"def readline(self) -> str | None:",
"def read(self, *args) -> \"PyObject *\":\n return _ida_fpro.qfile_t_read(self, *args)",
"def next(self):\n lines = []\n query = False\n while 1:\n line = self._uhandle.readline()\n if not line:\n break\n # If I've reached the next one, then put the line back and stop.\n if lines and (line.startswith('BLAST')\n or line.startswith('BLAST', 1)\n or line.startswith('<?xml ')):\n self._uhandle.saveline(line)\n break\n # New style files ommit the BLAST line to mark a new query:\n if line.startswith(\"Query=\"):\n if not query:\n if not self._header:\n self._header = lines[:]\n query = True\n else:\n #Start of another record\n self._uhandle.saveline(line)\n break\n lines.append(line)\n\n if query and \"BLAST\" not in lines[0]:\n #Cheat and re-insert the header\n #print \"-\"*50\n #print \"\".join(self._header)\n #print \"-\"*50\n #print \"\".join(lines)\n #print \"-\"*50\n lines = self._header + lines\n \n if not lines:\n return None\n \n data = ''.join(lines)\n if self._parser is not None:\n return self._parser.parse(File.StringHandle(data))\n return data",
"def readfast(self, name=\"\", *args, **kwargs):\n\n assert _os.path.isfile(self.__str__()) == True\n\n with open(self.__str__(), *args, **kwargs) as file_handler:\n for line in file_handler:\n yield line",
"def fasta_iter(handle, parse_description=False, line=None):\n if line is None:\n line = handle.readline()\n\n while line:\n data = {}\n\n line = to_str(line.strip())\n if not line.startswith('>'):\n msg = f\"Bad FASTA format: no '>' at beginning of line: {line}\"\n raise IOError(msg)\n\n if parse_description: # Try to grab the name and optional description\n try:\n data['name'], data['description'] = line[1:].split(' ', 1)\n except ValueError: # No optional description\n data['name'] = line[1:]\n data['description'] = ''\n else:\n data['name'] = line[1:]\n data['description'] = ''\n\n data['name'] = data['name'].strip()\n data['description'] = data['description'].strip()\n\n # Collect sequence lines into a list\n sequenceList = []\n line = to_str(handle.readline())\n while line and not line.startswith('>'):\n sequenceList.append(line.strip())\n line = to_str(handle.readline())\n\n data['sequence'] = ''.join(sequenceList)\n yield Record(**data)",
"def readline(self) -> Optional[str]:",
"def readline(self): \n\t\tif not self._input: raise PlumberExceptions.PipeTypeException(self)\n\t\tif self.eof(): return None\n\t\tret = \"\"\n\t\twhile not self.eof():\n\t\t\tbuf = self.read()\n\t\t\tif not buf:\n\t\t\t\tif not self.eof():\n\t\t\t\t\tself._state.unread(buf)\n\t\t\t\t\treturn \"\"\n\t\t\t\telse:\n\t\t\t\t\treturn None\n\t\t\tnl = self._nl_pattern.search(buf)\n\t\t\tif nl:\n\t\t\t\tret = ret + buf[:nl.span()[1]]\n\t\t\t\tself.unread(buf[nl.span()[1]:])\n\t\t\t\treturn ret\n\t\t\telse:\n\t\t\t\tret = ret + buf\n\t\treturn ret",
"def fasta_iter_py3(fasta_name):\n rec = None\n for line in open(fasta_name, \"r\"):\n if line[0] == \">\":\n if rec:\n yield rec\n rec = FastaRecord(line.strip()[1:])\n else:\n rec.sequence += line.strip()\n\n if rec:\n yield rec",
"def parse(self, parser):\n with self.reading:\n chunks = [self.read_buffer.dequeue() or (yield self.base.read(self.bufsize))]\n try:\n while True:\n tupe, result = parser.__parser__()(chunks[-1], False)\n if tupe & ParserResult.DONE:\n value, chunk, _ = result\n del chunks[:]\n self.read_buffer.enqueue(chunk)\n do_return(value)\n elif tupe & ParserResult.PARTIAL:\n parser = result\n chunks.append((yield self.base.read(self.bufsize)))\n else:\n raise ParserError(result)\n except BrokenPipeError:\n # try to terminate parser with last chunk\n tupe, result = parser.__parser__()(b'', True)\n if tupe & ParserResult.DONE:\n value, chunk, _ = result\n del chunks[:]\n self.read_buffer.enqueue(chunk)\n do_return(value)\n raise\n finally:\n for chunk in chunks:\n self.read_buffer.enqueue(chunk)",
"def _readline(self) -> Text:\n try:\n return self.stream.readline().decode(\"utf-8\").strip()\n except OSError: # pragma: no cover\n return \"\"",
"def parser(path):\n\t\n\tdata = Arff()\n\tdata.read_arff(path)\n\t\n\treturn data",
"def readline(self):\n try:\n return self.queue.get_nowait()\n except Empty:\n return None",
"def parse_fasta(f, trim_desc=False):\n \n f = iter(f)\n desc = next(f).strip()[1:]\n if trim_desc:\n desc = desc.split()[0]\n seq = StringIO()\n for line in f:\n line = line.strip()\n if line.startswith(\">\"):\n yield desc, seq.getvalue()\n desc = line[1:]\n if trim_desc:\n desc = desc.split()[0]\n seq = StringIO()\n else:\n seq.write(line.replace(\" \", \"\").replace(\"U\", \"T\"))\n yield desc, seq.getvalue()",
"def fasta_read_generator(file_handler):\r\n seq = []\r\n name = ''\r\n for line in file_handler:\r\n if line[0] == '>':\r\n sequence = ''.join(seq)\r\n if name: # only yield when we already have all data for the first sequence\r\n yield name, sequence\r\n name = line.rstrip()[1:] # omitting the leading >\r\n seq = []\r\n else:\r\n seq += [line]#.rstrip()] # keep line breaks\r\n sequence = ''.join(seq)\r\n yield name, sequence # don't forget the last sequence\r",
"def read_from_readline_interface(self, readline, filename=None, compat_mode=False):\n # Todo: Compat mode arg handling could be cleaned up in this method and class.\n if compat_mode:\n self.compat_mode = compat_mode\n tok_generator = call_tokenize(readline)\n\n self.token_list = []\n nesting_level = 0\n lower_nest_level = False\n for tok in tok_generator:\n if lower_nest_level:\n nesting_level -= 1\n lower_nest_level = False\n if tok[1] in self.nest_open:\n nesting_level += 1\n elif tok[1] in self.nest_close:\n lower_nest_level = True # Lower for next token.\n\n self.token_list.append(Token(tok, nesting_level=nesting_level,\n filename=filename, compat_mode=self.compat_mode))",
"def read_fasta(fp):\n name, seq = None, []\n for line in fp:\n line = line.rstrip()\n if line.startswith(\">\"):\n if name: yield (name, ''.join(seq))\n name, seq = line, []\n else:\n seq.append(line)\n if name: yield (name, ''.join(seq))",
"def read(self, f):\n return self.parse(f.read())",
"def fasta_reader(fasta):\n # ditch the boolean (x[0]) and just keep the header/seq grouping\n fa_iter = (x[1] for x in itertools.groupby(fasta, lambda line: line[0] == \">\"))\n for header in fa_iter:\n # drop the \">\"\n name = next(header)[1:].strip()\n # join all sequence lines to one by iterating until the next group.\n read = \"\".join(s.strip() for s in next(fa_iter))\n yield name, read",
"def readFastaFile(filename):\n if os.path.exists(filename)==False:return {}\n sequences={}\n fhr=open(filename,\"r\")\n for line in fhr:\n if line[0]==\">\":\n sequences[line.strip()[1:].split()[0]]=fhr.readline().strip()\n fhr.close()\n return sequences",
"def acqparser(self):\n if getattr(self, \"_acqparser\", None) is None:\n self._acqparser = AcqParserFIF(self.info)\n return self._acqparser",
"def __next__(self):\n # ++++ Get Next Four Lines ++++\n elemList = []\n for i in range(4):\n line = self._file.readline()\n self._currentLineNumber += 1 ## increment file position\n if line:\n elemList.append(line.strip('\\n'))\n else: \n elemList.append(None)\n \n # ++++ Check Lines For Expected Form ++++\n trues = [bool(x) for x in elemList].count(True)\n nones = elemList.count(None)\n # -- Check for acceptable end of file --\n if nones == 4:\n raise StopIteration\n # -- Make sure we got 4 full lines of data --\n assert trues == 4,\\\n \"** ERROR: It looks like I encountered a premature EOF or empty line.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n # -- Make sure we are in the correct \"register\" --\n assert elemList[0].startswith(self._hdSyms[0]),\\\n \"** ERROR: The 1st line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[0],self._currentLineNumber) \n assert elemList[2].startswith(self._hdSyms[1]),\\\n \"** ERROR: The 3rd line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[1],self._currentLineNumber) \n # -- Make sure the seq line and qual line have equal lengths --\n assert len(elemList[1]) == len(elemList[3]), \"** ERROR: The length of Sequence data and Quality data of the last record aren't equal.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber) \n \n # ++++ Return fatsQ data as tuple ++++\n return tuple(elemList)",
"def read_line(f):\n buf = ''\n while not buf.endswith(\"\\n\"):\n r, w, e = select.select([f], [], [])\n if r:\n nextbyte = f.read(1)\n if not nextbyte:\n return ''\n buf += nextbyte\n else:\n break\n return buf",
"def readstring(self, fstring):\n return self.parse(fstring)",
"def prepare_reader(self,\n filename_queue,\n max_quantized_value=2,\n min_quantized_value=-2):\n reader = tf.TFRecordReader()\n _, serialized_example = reader.read(filename_queue)\n\n return self.prepare_serialized_examples(serialized_example,\n max_quantized_value, min_quantized_value)",
"def prepare_reader(self,\n filename_queue,\n max_quantized_value=2,\n min_quantized_value=-2):\n reader = tf.TFRecordReader()\n _, serialized_example = reader.read(filename_queue)\n\n return self.prepare_serialized_examples(serialized_example,\n max_quantized_value, min_quantized_value)",
"def read(self, *args, **kwargs):\r\n buf = io.BufferedReader.read(self, *args, **kwargs)\r\n self.increment(len(buf))\r\n return buf",
"def from_fp(*args) -> \"qfile_t *\":\n return _ida_fpro.qfile_t_from_fp(*args)",
"def readFasta(self, fastaFile):\t\n\t\tname, seq = None, []\n\t\tfor line in fastaFile:\n\t\t\tline = line.rstrip()\n\t\t\tif (line.startswith(\">\")):\n\t\t\t\tif name: yield (name, ''.join(seq))\n\t\t\t\tname, seq = line, []\n\t\t\telse:\n\t\t\t\tseq.append(line)\n\t\tif name: yield (name, ''.join(seq))",
"def parse_quilfile(filename: str) -> inst.Program:\n input_stream = FileStream(filename)\n return _parse(input_stream, filename)",
"def process_fastq(fastq_file):\n current_record = {}\n\n for name, seq, blank, quality in zip(*[iter(fastq_file)]*4):\n current_record['name'] = name.strip('\\n')\n current_record['seq'] = seq.strip('\\n')\n current_record['quality'] = quality.strip('\\n')\n\n yield current_record",
"def read_lazy(self):\n self.process_rawq()\n return self.read_very_lazy()",
"def read_some(self):\n self.process_rawq()\n while not self.cookedq and not self.eof:\n self.fill_rawq()\n self.process_rawq()\n buf = self.cookedq\n self.cookedq = b''\n return buf",
"def read():\n # TODO",
"def read(self):\r\n return RecordIO.Reader.do_read(self._fp, self._codec)",
"def read_fasta(file_path=\"\"):\n\n line = \"\"\n\n try:\n fasta_handle = open(file_path,\"r\")\n except:\n raise IOError(\"Your input FASTA file is not right!\")\n\n # make sure the file is not empty\n while True:\n line = fasta_handle.readline()\n if line == \"\":\n return\n if line[0] == \">\":\n break\n\n # when the file is not empty, we try to load FASTA file\n while True:\n if line[0] != \">\":\n raise ValueError(\"Records in Fasta files should start with '>' character\")\n title = line[1:].rstrip()\n lines = []\n line = fasta_handle.readline()\n while True:\n if not line:\n break\n if line[0] == \">\":\n break\n lines.append(line.rstrip())\n line = fasta_handle.readline()\n\n yield title,\"\".join(lines).replace(\" \",\"\").replace(\"\\r\",\"\")\n\n if not line:\n return\n\n fasta_handle.close()\n assert False, \"Your input FASTA file have format problem.\"",
"def parse(handle):\n while True:\n record = __read(handle)\n if not record:\n break\n yield record",
"def test_iter_fastq(self):\r\n from StringIO import StringIO\r\n fasta = \"\"\">M32Nstr_1 039732_1312_3088 orig_bc=CTCGTGGAGTAG new_bc=CTCGTGGAGTAG bc_diffs=0\r\nCATGCTGCCTCCCGTAGGAGTCTGGGCCGTATCTCAGTCCCAATGTGGCCGGTCACCCTCTCAGGCCGGCTACCCGTCAAAGCCTTGGTAAGCCACTACCCCACCAACAAGCTGATAAGCCGCGAGTCCATCCCCAACCGCCGAAACTTTCCAACCCCCACCCATGCAGCAGGAGCTCCTATCCGGTATTAGCCCCAGTTTCCTGAAGTTATCCCAAAGTCAAGGGCAGGTTACTCACGTGTTACTCACCCGTTCGCCA\r\n>F22Frhd_2 040027_1369_1966 orig_bc=CAAGTGAGAGAG new_bc=CAAGTGAGAGAG bc_diffs=0\r\nCATGCTGCCTCCCGTAGGAGTCTGGGCCGTATCTCAGTCCCAATGTGGCCGGTCACCCTCTCAGGCCGGCTACCCGTCAAAGCCTTGGTAAGCCACTACCCCACCAACAAGCTGATAAGCCGCGAGTCCATCCCCAACCGCCGAAACTTTCCAACCCCCACCCATGCAGCAGGAGCTCCTATCCGGTATTAGCCCCAGTTTCCTGAAGTTATCCCAAAGTCAAGGGCAGGTTACTCACGTGTTACTCACCCGTTCGCCA\r\n>F12Labi_3 040135_0934_1957 orig_bc=AGTTAGTGCGTC new_bc=AGTTAGTGCGTC bc_diffs=0\r\nCATGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTACTGATCGTTGCCTTGGTGGGCCGTTACCCCGCCAACAAGCTAATCAGACGCATCCCCATCCATAACCGATAAATCTTTATTCGTAATCTCATGAGATCAAACGAATACATAAGGTATTAGTCCAACTTTGCTGGGTTAGTCCCTTACGTTATTGGGCGAGGTTGGATACGCGTTACTCACCCGTGCGCCGGTCGCCG\r\n\"\"\".splitlines()\r\n qual_raw = \"\"\">039695_0364_2008 length=49 uaccno=FFLHOYS01A5986\r\n35 35 35 35 35 35 35 35 35 32 30 30 33 33 35 35 35 35 35 34 34 34 36 36 36 36 36 35 35 36 36 36 36 36 40 37 37 37 37 38 39 38 37 38 36 35 35 35 35\r\n>039732_1312_3088 length=271 uaccno=FFLHOYS01DHI8I\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 38 38 33 33 34 34 36 36 37 37 35 24 19 19 19 38 38 37 37 37\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 38 38 38 38 38 37 38 38 38 38 38 38 38 37 37 38 38 38 31 31 33 36 33 33 33 36 36 36 36 24 25 25 28 31 36 36 36 36 36 36 36 38\r\n38 38 40 40 38 32 31 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 30 30 30 31 32 32 32\r\n>040027_1369_1966 length=271 uaccno=FFLHOYS01DMIIO\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 34 34 34 34 37 37 37 37 37 37\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 26 26 24 38 32 22 22 15 15 15 15 15 20 16 16 16 38 38 37 37 37\r\n37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 37 38 38 34 34 34 37 37 38 28 28 27 36 33 33 33 36 36 36 36 32 32 32 33 36 36 36 38 37 37 36 37 38\r\n38 38 38 38 38 31 31 32 32 32 32 32 32 32 32 32 32 32 32 31 28 28 28 32 31 31 31 31 32 32 32\r\n>040135_0934_1957 length=281 uaccno=FFLHOYS01CKBO3\r\n33 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 40 40 40 40 38 38 38 39 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 40 35 35 35 35 35 35 35 35 35 35 35 35 35 28 28\r\n28 28 28 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 33 26 26 26 26 33 35 35 35 35 
35\r\n35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 26 26 26 30 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35\r\n35 35 30 30 30 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 35 27 27 25 15 15 15 18 18 25 15 15 15 15 15 15 14 15 15 15 15 15 15 15 14 15 15 15 15 15 15 23 23 28\r\n28 24 30 31 32 22 22 16 16 16 16 22 22 23 25 21 21 21 21 21 19 21 16 16 16 16 16 22 21 23 25 25 25 21 22 22 22 22 22 22 22\r\n\"\"\".splitlines()\r\n qual = parse_qual_score(qual_raw)\r\n result = list(iter_fastq(fasta, qual))\r\n self.assertEqual(len(result), 3)\r\n self.assertEqual(result[0][1], 'M32Nstr_1')\r\n self.assertEqual(result[1][1], 'F22Frhd_2')\r\n self.assertEqual(result[2][1], 'F12Labi_3')\r\n\r\n lines = result[0][0].splitlines()\r\n self.assertEqual(lines[1][:5], 'CATGC')\r\n self.assertEqual(lines[3][:5], chr(33 + 37) * 5)\r\n self.assertEqual(\r\n lines[3][-5:], ''.join(map(chr, [33 + 30, 33 + 31, 33 + 32, 33 + 32, 33 + 32])))",
"def readline(self):\n line = \"\"\n n_pos = -1\n try:\n while n_pos < 0:\n line += self.next_chunk()\n n_pos = line.find('\\n')\n except StopIteration:\n pass\n\n if n_pos >= 0:\n line, extra = line[:n_pos+1], line[n_pos+1:]\n self.unshift(extra)\n return line",
"def readline(self, size=-1):\n ...",
"def read_fasta(fasta_name):\n \n \"\"\"first open the file outside \"\"\"\n file_handler = open(fasta_name)\n\n # ditch the boolean (x[0]) and just keep the header or sequence since\n # we know they alternate.\n fasta_iter = (x[1] for x in groupby(file_handler, lambda line: line[0] == \">\"))\n\n for header in fasta_iter:\n # drop the \">\"\n headerStr = header.__next__()[1:].strip()\n\n # join all sequence lines to one.\n seq = \"\".join(s.strip() for s in fasta_iter.__next__())\n\n # yield (headerStr, seq)\n result_record = {'header':headerStr,'seqRecord':seq}\n return result_record",
"def get_reader(fname):\n\n if fname == \"-\":\n fh = sys.stdin\n else:\n fh = open(fname, \"r\")\n \n rdr = csv.reader(fh, dialect=\"psv\")\n return (rdr, fh)",
"def parse_text(filehandle: TextIO) -> Iterator[Fasta]:\n\n # Check that the file looks like UniProt text format\n first_line = next(filehandle)\n if not first_line.startswith(\"ID\"):\n raise TextParserError(\n \"Unexpected file format: first line of UniProt text file should start with 'ID'\"\n )\n filehandle.seek(0)\n\n fasta = Fasta(sequence=\"\")\n for line in filehandle:\n key = line[:2] # This is more efficient than using line.startswith\n if key == \"ID\":\n tokens = line.split()\n fasta.entry_name = tokens[1]\n fasta.reviewed = True if tokens[2] == \"Reviewed;\" else False\n elif key == \"AC\":\n if fasta.accession is None:\n accessions = line[5:].rstrip(\";\\n\").split(\"; \")\n fasta.accession = accessions[0]\n elif key == \"DT\":\n if \"sequence version\" in line:\n tokens = line[5:].strip(\".\\n\").split()\n fasta.version = int(tokens[3])\n elif key == \"DE\":\n if \"RecName\" in line:\n fasta.name = _extract_name(line)\n # Get the first SubName if no RecName found\n elif fasta.name is None and line[5:12] == \"SubName\":\n fasta.name = _extract_name(line)\n elif line[5:10] == \"Flags\" and \"Fragment\" in line:\n fasta.fragment = True\n elif key == \"GN\":\n if line[5:10] == \"Name=\":\n tokens = line[10:].split(\";\")\n # Remove evidence tags, if present\n gene_tokens = tokens[0].split(\" {\")\n fasta.gene = gene_tokens[0]\n elif key == \"OS\":\n # TODO: check for multiline species name (excluding brackets)\n if fasta.species is None:\n species_line = line[5:].strip().split(\" (\")\n fasta.species = species_line[0].strip(\".\")\n elif key == \"OX\":\n if \"NCBI_TaxID\" in line:\n tokens = line[5:].strip(\";\\n\").split(\"; \")\n # Remove evidence tag if present\n taxid_tokens = tokens[0][11:].split(\" {\")\n fasta.taxid = taxid_tokens[0]\n elif key == \"PE\":\n fasta.evidence = int(line[5])\n elif key == \" \":\n sequence_line = line.strip().replace(\" \", \"\")\n fasta.sequence += sequence_line\n elif key == \"//\":\n yield fasta\n fasta = Fasta(sequence=\"\")",
"def read_fasta(self, handle):\n read = \"\"\n for line in handle:\n if line[0] == \">\":\n if len(read):\n self.add_read(read)\n read = \"\"\n else:\n read += line.strip()\n self.add_read(read)",
"def readFasta(self, fp):\n\t\t\n\t\tfor head, seq in self.parseFasta(fp):\n\t\t\t#analyzing the sequence\n\t\t\tself.analyzeSequence(seq)\n\t\t\t#saving the header\n\t\t\tif head == '':\n\t\t\t\tcontinue\n\t\t\telse:\t\n\t\t\t\tself.header.append(head)",
"def readline(self) -> bytes | None:",
"def getOneRead(self, f, q, s):\n probs = np.power(10, q / -10)\n bases = []\n f.seek(s)\n n = 0\n while True:\n b = f.read(1)\n if b == \"\\n\":\n continue\n if random.random() < probs[n]:\n b = random.choice('ACGT')\n else:\n b = self.getAllele(b, f.tell() - 1)\n bases.append(b)\n n += 1\n if n == self.readlen:\n break\n return bases",
"def readline(self):\n if self.index < self.length:\n result = self.store[self.index:]\n elif False == self.closed:\n result = self.input.readline()\n self.lineNumber += 1\n else:\n result =''\n self.index = 0\n self.length = 0\n return result",
"def parse(f=None):\n\n f = f if f else sys.stdin\n return _parseHelper(f)",
"def file():\r\n return deque()",
"def timed_readline(self, f, timeout):\n set_file_nonblock(f)\n\n output = []\n inactive = 0\n while 1:\n (rlist, dummy_wlist, dummy_xlist) = select.select(\n [f], [], [], 1.0)\n\n if not rlist:\n inactive += 1 # approx -- py select doesn't return tv\n if inactive >= timeout:\n break\n else:\n inactive = 0\n c = f.read(1)\n output.append(c) # keep newline\n if c == '' or c == '\\n':\n break\n\n set_file_nonblock(f, non_blocking=False)\n\n if inactive >= timeout:\n raise TimeoutError # note, an incomplete line can be lost\n else:\n return ''.join(output)",
"def reverse_readline(filename, buf_size=8192):\n with open(filename, \"r\", encoding=\"utf-8\") as fh:\n segment = None\n offset = 0\n fh.seek(0, os.SEEK_END)\n file_size = remaining_size = fh.tell()\n while remaining_size > 0:\n offset = min(file_size, offset + buf_size)\n fh.seek(file_size - offset)\n buffer = fh.read(min(remaining_size, buf_size))\n remaining_size -= buf_size\n lines = buffer.split(\"\\n\")\n # the first line of the buffer is probably not a complete line so\n # we'll save it and append it to the last line of the next buffer\n # we read\n if segment is not None:\n # if the previous chunk starts right from the beginning of line\n # do not concact the segment to the last line of new chunk\n # instead, yield the segment first\n if buffer[-1] is not \"\\n\":\n lines[-1] += segment\n else:\n yield segment\n segment = lines[0]\n for index in range(len(lines) - 1, 0, -1):\n if len(lines[index]):\n yield lines[index]\n # Don't yield None if the file was empty\n if segment is not None:\n yield segment",
"def fasta_reader(inp):\n #inp is hard coded as \"Sequence1/2.fasta in this script\".\n with open(inp) as in_file: \n for line in in_file.readlines():\n #Guarantees sequence is pulled from the FASTA file not the title \n if line[0].isalpha():\n seq = line.rstrip()\n return (seq)",
"def readFastaFile(filename):\n info={}\n fhr=open(filename,\"r\")\n while(True):\n line=fhr.readline()\n if not line: break\n if(\">\" in line):\n try:\n info[line.strip()[1:].split()[0]]=fhr.readline().strip()\n except ValueError:\n pass\n return info",
"def reverse_readline(\n fh: IO, start_byte: int = 0, buf_size: int = 8192\n) -> Generator[str, None, None]:\n segment: OptionalType[str] = None\n offset = 0\n if start_byte:\n fh.seek(start_byte)\n else:\n fh.seek(0, os.SEEK_END)\n total_size = remaining_size = fh.tell()\n while remaining_size > 0:\n offset = min(total_size, offset + buf_size)\n fh.seek(-offset, os.SEEK_END)\n buf = fh.read(min(remaining_size, buf_size))\n remaining_size -= buf_size\n lines = buf.decode(sys.getfilesystemencoding()).split('\\n')\n # the first line of the buffer is probably not a complete line so\n # we'll save it and append it to the last line of the next buffer\n # we read\n if segment is not None:\n # if the previous chunk starts right from the beginning of line\n # do not concact the segment to the last line of new chunk\n # instead, yield the segment first\n if buf[-1] != '\\n':\n lines[-1] += segment\n else:\n yield segment\n segment = lines[0]\n for index in range(len(lines) - 1, 0, -1):\n if len(lines[index]):\n yield lines[index]\n yield segment",
"def stream_fastq(fpath):\n with open(fpath, 'r') as fp:\n ### initialization \n dct = dict()\n \n ### iterate through the file\n for cnt, line in enumerate(fp):\n # yield the results for every four lines and\n # reset the dictionary\n if (cnt != 0) and (cnt % 4 == 0):\n yield dct\n dct = dict()\n \n # process each line and add to a dictionary \n idx = cnt % 4\n key = KEYS[idx]\n line = line.strip()\n dct[key] = line",
"def reverse_readline(fh, buf_size=8192):\n segment = None\n offset = 0\n fh.seek(0, os.SEEK_END)\n file_size = remaining_size = fh.tell()\n while remaining_size > 0:\n offset = min(file_size, offset + buf_size)\n fh.seek(file_size - offset)\n buffer = fh.read(min(remaining_size, buf_size))\n remaining_size -= buf_size\n lines = buffer.split('\\n')\n # the first line of the buffer is probably not a complete line so\n # we'll save it and append it to the last line of the next buffer\n # we read\n if segment is not None:\n # if the previous chunk starts right from the beginning of line\n # do not concact the segment to the last line of new chunk\n # instead, yield the segment first\n if buffer[-1] is not '\\n':\n lines[-1] += segment\n else:\n yield segment\n segment = lines[0]\n for index in range(len(lines) - 1, 0, -1):\n if len(lines[index]):\n yield lines[index]\n # Don't yield None if the file was empty\n if segment is not None:\n yield segment",
"def get_read_parser(format):\n format = format.lower()\n if format == 'bed':\n return BedReadParser\n elif format == 'bedpe':\n return BedPeReadParser\n elif format == 'sam':\n return SamReadParser\n elif format == 'bam':\n return BamReadParser\n else:\n raise ValueError(f\"unknown read file format: {format!r}\")",
"def FastaIterator(fh):\n def readTotitle(fh):\n \"\"\"returns a tuple ([lines before the next title line], next tile line)\n \"\"\"\n preLines = []\n while True:\n l = fh.readline().strip()\n if l.startswith('>'):\n return (preLines,l)\n elif l == '':\n return preLines,None\n else:\n preLines.append(l)\n\n\n if type(fh) in StringTypes:\n fh = file(fh)\n\n preLines,nextTitleLine =readTotitle(fh)\n\n while nextTitleLine != None:\n title = nextTitleLine[1:].rstrip()\n preLines,nextTitleLine=readTotitle(fh)\n yield (title,''.join(map(lambda x: x.rstrip(),preLines)))",
"def _readline(self):\n return self.ser.readline().decode(\"ASCII\").strip()",
"def next(self):\n # ++++ Get Next Four Lines ++++\n elemList = []\n for i in range(4):\n line = self._file.readline()\n self._currentLineNumber += 1 ## increment file position\n if line:\n elemList.append(line.strip('\\n'))\n else:\n elemList.append(None)\n \n # ++++ Check Lines For Expected Form ++++\n trues = [bool(x) for x in elemList].count(True)\n nones = elemList.count(None)\n # -- Check for acceptable end of file --\n if nones == 4:\n raise StopIteration\n # -- Make sure we got 4 full lines of data --\n assert trues == 4,\\\n \"** ERROR: It looks like I encountered a premature EOF or empty line.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n # -- Make sure we are in the correct \"register\" --\n assert elemList[0].startswith(self._hdSyms[0]),\\\n \"** ERROR: The 1st line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[0],self._currentLineNumber)\n assert elemList[2].startswith(self._hdSyms[1]),\\\n \"** ERROR: The 3rd line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[1],self._currentLineNumber)\n # -- Make sure the seq line and qual line have equal lengths --\n assert len(elemList[1]) == len(elemList[3]), \"** ERROR: The length of Sequence data and Quality data of the last record aren't equal.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n \n # ++++ Return fatsQ data as tuple ++++\n return tuple(elemList)",
"def _parseHelper(fastaInput):\n\n desc, sequence = \"\", []\n\n for rline in fastaInput:\n line = rline.rstrip()\n if not line:\n break\n elif line[0] == \">\":\n if sequence: \n yield desc, ''.join(sequence)\n sequence = []\n desc = line[1:]\n else:\n sequence.append(line)\n\n if desc and sequence:\n yield desc, ''.join(sequence)",
"def serial_read(useParse=False, header='$', tail='#'):\n global ser, recvBuff, startRecord\n retData = ''\n if useParse:\n if ser.readable():\n while ser.inWaiting():\n c = ser.read(1)\n if c == header:\n startRecord = True\n recvBuff = ''\n elif c == tail:\n startRecord = False\n if recvBuff != '':\n #print 'I get: ', recvBuff\n retData = recvBuff\n elif startRecord:\n recvBuff += c\n else:\n pass\n else:\n print 'The serial', ser.portstr, 'cannot be read.'\n pass\n else:\n if ser.readable():\n while ser.inWaiting():\n retData += ser.read(1)\n else:\n print 'The serial', ser.portstr, 'cannot be read.'\n pass\n return retData",
"def fromString(cls, s):\n try:\n lines = s.splitlines()\n assert len(lines) > 1\n assert lines[0][0] == cls.DELIMITER\n name = lines[0][1:]\n sequence = \"\".join(lines[1:])\n return FastaRecord(name, sequence)\n except AssertionError:\n raise ValueError(\"String not recognized as a valid FASTA record\")",
"def get_input_source(parser):\n\tgroup=parser.add_mutually_exclusive_group(required=False)\n\tgroup.add_argument('--file_slice', dest='file_slice', help=\"FILE_SLICE format infile:start-end. Where start and end are byte indexes.\", default=None)\n\tgroup.add_argument('--file', dest='file', help=\"FILE to read from.\", default=None)\n\targs = parser.parse_args()\n\n\tif args.file_slice:\n\t\tinfile,section,=args.file_slice.split(':')\n\t\tstart,end,=[int(x) for x in section.split('-')]\n\t\tinput=Chunk(infile, start, end)\n\telif args.file:\n\t\t#extend the file type to include start and end fields\n\t\tinput=type('I',(file,),{'start':-1,'end':-1})(args.file, 'r')\n\t\tinput.start=0\n\telse:\n\t\tclass I(object):\n\t\t\tstart=0\n\t\t\tend=-1\n\t\t\tdef xreadlines(self): return sys.stdin.xreadlines()\n\t\t\tdef read(self): return sys.stdin.read()\n\t\tinput=I()\n\treturn input",
"def parse_multifasta_file(file, number_of_fastas):\n\n with open(file) as file:\n for i in range(number_of_fastas):\n fasts_seq = ''\n fasta_name = file.readline().strip()[1:]\n end_of_file = False\n end_of_seq = False\n while not end_of_seq and not end_of_file:\n x = file.tell()\n seq = file.readline()\n if not seq:\n end_of_file = True\n elif '>' not in seq:\n fasts_seq = fasts_seq + seq\n else:\n file.seek(x)\n end_of_seq = True\n fasts_seq = re.sub(r'\\n', '', fasts_seq)\n yield fasta_name, fasts_seq",
"def readline(self) -> Optional[bytes]:\n ...",
"def readline(self):\n sep = b'\\n'\n seplen = len(sep)\n try:\n line = yield from self.readuntil(sep)\n except IncompleteReadError as e:\n return e.partial\n except LimitOverrunError as e:\n if self._buffer.startswith(sep, e.consumed):\n del self._buffer[:e.consumed + seplen]\n else:\n self._buffer.clear()\n self._maybe_resume_transport()\n raise ValueError(e.args[0])\n return line",
"def read(cls, text):\n\n\t\treturn cls._parse(cls._tokenize(text))",
"def qfile_t_from_fp(*args) -> \"qfile_t *\":\n return _ida_fpro.qfile_t_from_fp(*args)",
"def __init__(self, filename_or_filelike, parsers, infinite=False):\r\n self._buffer = Buffer(filename_or_filelike, infinite)\r\n self._head = None\r\n self._tail = []\r\n self._infinite = infinite\r\n self._parsers = parsers",
"def CreateParser(skip_meta: bool = False):\n return ParserWithLines(skip_meta)",
"def readFastaFile(filename):",
"def file_reading_iterator_raw(filename, options='r'):\n # actual loop\n with open(filename, options) as f:\n while True:\n line = f.readline()\n if not line:\n break\n # return line\n yield line",
"def next(self):\n\n lines = []\n while 1: \n # if at beginning, skip the AS and look for first CO command\n line=self._uhandle.readline()\n if not line: # empty or corrupt file\n return None\n if line[:2]=='CO':\n lines.append(line)\n break\n while 1:\n line = self._uhandle.readline()\n if not line:\n break\n # If a new record, then put the line back and stop.\n if lines and line[:2] == 'CO':\n self._uhandle.saveline(line)\n break\n lines.append(line)\n\n if not lines:\n return None\n\n data = ''.join(lines)\n if self._parser is not None:\n return self._parser.parse(File.StringHandle(data))\n return data",
"def get_fastq(self):\n\t\tif self.have_fastqs is False:\n\t\t\tself._extract_fastqs_from_fast5()\n\t\t\tself.have_fastqs = True\n\n\t\tif not self.fastqs:\n\t\t\treturn None\n\t\telif self.fastqs.get('twodirections') is not None:\n\t\t\treturn self.fastqs.get('twodirections')\n\t\telif self.fastqs.get('template') is not None:\n\t\t\treturn self.fastqs.get('template')\n\t\telif self.fastqs.get('complement') is not None:\n\t\t\treturn self.fastqs.get('complement')",
"def read_incoming(self):\r\n buf = ''\r\n debug_prompt = re.compile(r'\\A[\\w]+>>? ')\r\n while 1:\r\n try:\r\n buf += os.read(self.fid, 100).decode('utf8')\r\n except:\r\n self.queue.put(None)\r\n return\r\n lines = buf.splitlines()\r\n for line in lines[:-1]:\r\n self.queue.put(line)\r\n if buf.endswith('\\n'):\r\n self.queue.put(lines[-1])\r\n buf = ''\r\n elif re.match(debug_prompt, lines[-1]):\r\n self.queue.put(lines[-1])\r\n buf = ''\r\n else:\r\n buf = lines[-1]",
"def parse_sequence(sequence):\n return FastaEntry.from_text(sequence)",
"def fasta_parser(filename):\n fasta = {}\n with open(filename) as f:\n contents = f.read()[1:].split('\\n>')\n for section in contents:\n sample = section.split('\\n')\n sample_id = sample[0]\n seq = ''.join(sample[1:]).strip()\n fasta[sample_id] = seq\n return fasta",
"def Parse_Fasta(filename):\n dic = {}\n name = None\n seq = ''\n with open(filename) as F:\n for line in F:\n if line.startswith('>'):\n if name is not None:\n dic[name] = seq\n seq = ''\n name = line.strip()\n else:\n seq += line\n if not name in dic:\n dic[name] = seq\n return dic",
"def parse_file(self, path):\r\n return self._parse(antlr3.ANTLRFileStream(path))",
"def read_records(self, input_path, offset_range_tracker):\n start_offset = offset_range_tracker.start_position()\n with tf.io.gfile.GFile(input_path, 'r') as f:\n f.seek(start_offset)\n # Read lines in and proceed once a `+` is in the 3rd position.\n record = collections.deque([], 4)\n while True:\n line = f.readline()\n if not line:\n # End of file reached\n break\n record.append(line.strip())\n if len(record) == 4 and record[0].startswith('@') and record[2] == '+':\n # Once a '+' is in position 3 a full record exists in the record.\n read_name = record[0]\n sequence = record[1]\n qual = record[3]\n self.fastq_records_counter.inc()\n yield (read_name, sequence, qual)\n record.clear()",
"def __getitem__(self, rname):\n return FastaRecord(rname, self)",
"def parse(program):\n return read_from_tokens(tokenize(program))",
"def test_fortran_reader_notbasic():\n\n tabstr = dedent(\n \"\"\"\n a b\n 1 1.23D4\n 2 5.67D-8\n \"\"\"\n )[1:-1]\n\n t1 = ascii.read(tabstr.split(\"\\n\"), fast_reader={\"exponent_style\": \"D\"})\n\n assert t1[\"b\"].dtype.kind == \"f\"\n\n tabrdb = dedent(\n \"\"\"\n a\\tb\n # A simple RDB table\n N\\tN\n 1\\t 1.23D4\n 2\\t 5.67-008\n \"\"\"\n )[1:-1]\n\n t2 = ascii.read(\n tabrdb.split(\"\\n\"), format=\"rdb\", fast_reader={\"exponent_style\": \"fortran\"}\n )\n\n assert t2[\"b\"].dtype.kind == \"f\"\n\n tabrst = dedent(\n \"\"\"\n = =======\n a b\n = =======\n 1 1.23E4\n 2 5.67E-8\n = =======\n \"\"\"\n )[1:-1]\n\n t3 = ascii.read(tabrst.split(\"\\n\"), format=\"rst\")\n\n assert t3[\"b\"].dtype.kind == \"f\"\n\n t4 = ascii.read(tabrst.split(\"\\n\"), guess=True)\n\n assert t4[\"b\"].dtype.kind == \"f\"\n\n # In the special case of fast_converter=True (the default),\n # incompatibility is ignored\n t5 = ascii.read(tabrst.split(\"\\n\"), format=\"rst\", fast_reader=True)\n\n assert t5[\"b\"].dtype.kind == \"f\"\n\n with pytest.raises(ParameterError):\n ascii.read(tabrst.split(\"\\n\"), format=\"rst\", guess=False, fast_reader=\"force\")\n\n with pytest.raises(ParameterError):\n ascii.read(\n tabrst.split(\"\\n\"),\n format=\"rst\",\n guess=False,\n fast_reader={\"use_fast_converter\": False},\n )\n\n tabrst = tabrst.replace(\"E\", \"D\")\n\n with pytest.raises(ParameterError):\n ascii.read(\n tabrst.split(\"\\n\"),\n format=\"rst\",\n guess=False,\n fast_reader={\"exponent_style\": \"D\"},\n )",
"def fasta(file_path):\n \n print(f\"Parsing fasta '{file_path}'\")\n data = {\n 'ur_up_': [], 'accession': [],\n 'entry_name': [], 'offset': [],\n 'taxonomy': [], 'sequence': []\n }\n\n with open(file_path, 'r') as f:\n for i, line in enumerate(f):\n line = line.strip()\n \n if line[0] == '>':\n key = line[1:]\n \n if i == 0:\n name, offset = key.split(\"/\")\n ur_up_, acc = None, None\n else:\n ur_up_, acc, name_offset = key.split(\"|\")\n name, offset = name_offset.split('/')\n \n data['ur_up_'].append(ur_up_)\n data['accession'].append(acc)\n data['entry_name'].append(name)\n data['offset'].append(offset)\n data['sequence'].append('')\n data['taxonomy'].append(name.split('_')[1])\n else:\n data['sequence'][-1] += line\n \n if i and (i % 50000 == 0):\n print(f\"Reached: {i}\")\n\n return pd.DataFrame(data=data)",
"def parse(self, filehandle):\n l = filehandle.readline()\n if l.split()[0] != '##maf':\n return\n else:\n self.setpar(l.split()[1:])\n\n l=filehandle.readline()\n while l:\n la = l.split()\n## print la\n if(len(la)==0 or la[0]=='#'):\n## print \"skipping\"\n 1\n elif(la[0]=='a'):\n## print \"reading alignment\"\n self.readalign(la[1:], filehandle)\n else:\n## print \"end of records\"\n return\n\n l=filehandle.readline()"
] | [
"0.6683479",
"0.60699964",
"0.6053965",
"0.6047205",
"0.60288286",
"0.5997878",
"0.5994087",
"0.5935865",
"0.5871207",
"0.57843333",
"0.5736459",
"0.5680758",
"0.5660535",
"0.55910975",
"0.55550677",
"0.55075556",
"0.54911923",
"0.5433011",
"0.5417076",
"0.5411586",
"0.5405132",
"0.5394496",
"0.53842074",
"0.5381334",
"0.5380335",
"0.5371065",
"0.53695095",
"0.5365754",
"0.5357577",
"0.5342128",
"0.53310084",
"0.5312088",
"0.52840936",
"0.52728176",
"0.5270762",
"0.5264343",
"0.5263716",
"0.52591586",
"0.52567273",
"0.52567273",
"0.52524817",
"0.52492076",
"0.52346843",
"0.5225352",
"0.5223851",
"0.520687",
"0.52024287",
"0.5188484",
"0.51870984",
"0.5186374",
"0.5183004",
"0.5166494",
"0.516561",
"0.51649255",
"0.5160709",
"0.51439655",
"0.51393694",
"0.5130666",
"0.51218987",
"0.5121286",
"0.51207584",
"0.51196164",
"0.5116402",
"0.511529",
"0.5113849",
"0.5108617",
"0.510538",
"0.5101298",
"0.5070292",
"0.50692487",
"0.5061446",
"0.50542843",
"0.5053517",
"0.50522065",
"0.5043095",
"0.50194514",
"0.49877927",
"0.49871802",
"0.49864239",
"0.49820474",
"0.49783623",
"0.49563658",
"0.49541146",
"0.49502137",
"0.49445474",
"0.49381518",
"0.49378192",
"0.4934055",
"0.4933946",
"0.49280465",
"0.4927683",
"0.49220502",
"0.49158937",
"0.49067193",
"0.48904806",
"0.48811838",
"0.4878553",
"0.4875356",
"0.487434",
"0.48691472",
"0.48646614"
] | 0.0 | -1 |
Reads in next element, parses, and does minimal verification. | def __next__(self):
# ++++ Get Next Four Lines ++++
elemList = []
for i in range(4):
line = self._file.readline()
self._currentLineNumber += 1 ## increment file position
if line:
elemList.append(line.strip('\n'))
else:
elemList.append(None)
# ++++ Check Lines For Expected Form ++++
trues = [bool(x) for x in elemList].count(True)
nones = elemList.count(None)
# -- Check for acceptable end of file --
if nones == 4:
raise StopIteration
# -- Make sure we got 4 full lines of data --
assert trues == 4,\
"** ERROR: It looks like I encountered a premature EOF or empty line.\n\
Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**" % (self._currentLineNumber)
# -- Make sure we are in the correct "register" --
assert elemList[0].startswith(self._hdSyms[0]),\
"** ERROR: The 1st line in fastq element does not start with '%s'.\n\
Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**" % (self._hdSyms[0],self._currentLineNumber)
assert elemList[2].startswith(self._hdSyms[1]),\
"** ERROR: The 3rd line in fastq element does not start with '%s'.\n\
Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**" % (self._hdSyms[1],self._currentLineNumber)
# -- Make sure the seq line and qual line have equal lengths --
assert len(elemList[1]) == len(elemList[3]), "** ERROR: The length of Sequence data and Quality data of the last record aren't equal.\n\
Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**" % (self._currentLineNumber)
        # ++++ Return fastQ data as tuple ++++
return tuple(elemList) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def next():",
"def next():",
"def next_element(self):\n return self.extract_element()",
"def has_next():",
"def next(self):\r\n\t\tself.index += 1\r\n\t\treturn not self.eof()",
"def test_parse_valid(self):\n mock_scraper = MockCtdScraper()\n scrape_gen = mock_scraper.scrape(TEST_CHUNKSIZE)\n self.parser.parse(next(scrape_gen))",
"def has_next(self):\n try:\n self.next()\n return True\n except (ParseException, struct.error):\n return False",
"def next(self):\r\n pass",
"def next(self):\n pass",
"def next(self):\n pass",
"def next(self):\n pass",
"def next(self):\n pass",
"def next(self):\n self.record_offset += 2 ** self.blockettes[1000]['Data Record Length']\n self._parseHeader()",
"def next(self):\n # ++++ Get Next Four Lines ++++\n elemList = []\n for i in range(4):\n line = self._file.readline()\n self._currentLineNumber += 1 ## increment file position\n if line:\n elemList.append(line.strip('\\n'))\n else:\n elemList.append(None)\n \n # ++++ Check Lines For Expected Form ++++\n trues = [bool(x) for x in elemList].count(True)\n nones = elemList.count(None)\n # -- Check for acceptable end of file --\n if nones == 4:\n raise StopIteration\n # -- Make sure we got 4 full lines of data --\n assert trues == 4,\\\n \"** ERROR: It looks like I encountered a premature EOF or empty line.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n # -- Make sure we are in the correct \"register\" --\n assert elemList[0].startswith(self._hdSyms[0]),\\\n \"** ERROR: The 1st line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[0],self._currentLineNumber)\n assert elemList[2].startswith(self._hdSyms[1]),\\\n \"** ERROR: The 3rd line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[1],self._currentLineNumber)\n # -- Make sure the seq line and qual line have equal lengths --\n assert len(elemList[1]) == len(elemList[3]), \"** ERROR: The length of Sequence data and Quality data of the last record aren't equal.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n \n # ++++ Return fatsQ data as tuple ++++\n return tuple(elemList)",
"def next(self):\n raise NotImplementedError",
"def next(self):\n nxt = self.readentry()\n if nxt is None:\n raise StopIteration\n return nxt",
"def _advance(self, idlist=None):\n if self.token.id == \"END\":\n return\n if idlist and self.token.id in idlist:\n self.token = next(self.token_gen)\n elif not idlist:\n self.token = next(self.token_gen)\n else:\n raise ParseError(\n \"\"\"Expected one of %s found %r instead. (line: %i)\"\"\"\n % (\" \".join(idlist), self.token.id, self.line)\n )",
"def next( self ):\n next(self)",
"def test__parse_next(value, position, expected_output, expected_position):\n state = ParserState(value)\n state.position = position\n \n output = parse_next(state)\n vampytest.assert_instance(output, tuple)\n vampytest.assert_eq(output, expected_output)\n vampytest.assert_eq(state.position, expected_position)",
"def has_next():\n\n return True",
"def next(self):\n return _libsbml.XMLInputStream_next(self)",
"def test_next():\n members = tuple(BlogPostStatus)\n\n for (number, member) in enumerate(members[:-1], 1):\n yield (tools.eq_, member.next, members[number])\n\n yield (tools.assert_is_none, members[-1].next)",
"def nextevent(self):\n self.validEvent=False\n self.currentTrueTrk=-1\n self.aLine = self.nextline()\n if self.aLine == \"\":\n return self.validEvent\n \n # Now we have a valid file and a line is loaded\n while True:\n while re.match( 'F', self.aLine ) is None:\n self.aLine = self.nextline()\n if self.aLine == \"\":\n break\n splitline = self.aLine.split()\n if len(splitline) < 2:\n # File done or problem reading\n self.validEvent=False\n return self.validEvent\n try:\n self.eventnum = int(splitline[1])\n except:\n # This would be weird. We have an F but no evt number.\n self.eventnum = -1\n self.globalEvtNum = self.globalEvtNum + 1\n if self.verbose:\n if self.globalEvtNum%1000 == 0:\n print \"Global event %d, local %d\"%(self.globalEvtNum,self.eventnum)\n break\n\n # Now we have a valid event\n self.validEvent=True\n return self.validEvent",
"def read_event(self):\n tree = self.tree\n while True:\n tree_event, elem = next(self.context)\n\n if tree_event == _EVENT_START:\n parent = tree[elem.getparent()] if elem.getparent() in tree else None\n\n if elem.tag.endswith(xes_constants.TAG_EVENT):\n self.event = Event()\n tree[elem] = self.event\n self.reading_event = True\n continue\n\n if self.reading_event:\n if elem.tag.endswith(xes_constants.TAG_STRING):\n if parent is not None:\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY),\n elem.get(xes_constants.KEY_VALUE), tree)\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_DATE):\n try:\n dt = self.date_parser.apply(elem.get(xes_constants.KEY_VALUE))\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY), dt, tree)\n except TypeError:\n logging.info(\"failed to parse date: \" + str(elem.get(xes_constants.KEY_VALUE)))\n except ValueError:\n logging.info(\"failed to parse date: \" + str(elem.get(xes_constants.KEY_VALUE)))\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_FLOAT):\n if parent is not None:\n try:\n val = float(elem.get(xes_constants.KEY_VALUE))\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY), val, tree)\n except ValueError:\n logging.info(\"failed to parse float: \" + str(elem.get(xes_constants.KEY_VALUE)))\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_INT):\n if parent is not None:\n try:\n val = int(elem.get(xes_constants.KEY_VALUE))\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY), val, tree)\n except ValueError:\n logging.info(\"failed to parse int: \" + str(elem.get(xes_constants.KEY_VALUE)))\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_BOOLEAN):\n if parent is not None:\n try:\n val0 = elem.get(xes_constants.KEY_VALUE)\n val = False\n if str(val0).lower() == \"true\":\n val = True\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY), val, tree)\n except ValueError:\n logging.info(\"failed to parse boolean: \" + str(elem.get(xes_constants.KEY_VALUE)))\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_LIST):\n if parent is not None:\n # lists have no value, hence we put None as a value\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY), None, tree)\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_ID):\n if parent is not None:\n tree = parse_attribute(elem, parent, elem.get(xes_constants.KEY_KEY),\n elem.get(xes_constants.KEY_VALUE), tree)\n continue\n\n elif tree_event == _EVENT_END:\n if elem in tree:\n del tree[elem]\n elem.clear()\n if elem.getprevious() is not None:\n try:\n del elem.getparent()[0]\n except TypeError:\n pass\n\n if elem.tag.endswith(xes_constants.TAG_EVENT):\n self.reading_event = False\n if self.acceptance_condition(self.event):\n return self.event\n continue\n\n elif elem.tag.endswith(xes_constants.TAG_LOG):\n self.reading_log = False\n break",
"def loads(self):\n\n self.ignore_whitespaces()\n\n if self.index >= self.length:\n return\n\n if self.xtext[self.index] == '<':\n self.index += 1\n\n if self.xtext[self.index] == '/':\n self.index += 1\n\n tag = self.read_until('>')\n self.index += 1\n\n elements = []\n while len(self.stack) > 0 and\\\n (isinstance(self.stack[-1], str) or self.stack[-1]['tag'] != tag):\n elements.append(self.stack.pop())\n\n assert len(self.stack) > 0\n\n self.stack[-1]['elements'].extend(reversed(elements))\n\n else:\n self.ignore_whitespaces()\n tag = self.read_until(' >')\n\n attribs = {}\n if self.xtext[self.index] != '>':\n attribs = self.read_attribs()\n\n self.index += 1\n self.stack.append({'tag': tag, 'attribs': attribs, 'elements': []})\n else:\n self.stack.append(self.read_until('<').strip())\n\n self.loads()",
"def parse_next_instruction(self) -> None:\n instruction = self.program[self.pointer]\n opcode = instruction % 100\n if opcode == 99:\n self.halt = True\n\n self.modes = instruction // 100\n\n if opcode == 1:\n self.op_sum()\n if opcode == 2:\n self.op_multiply()\n if opcode == 3:\n self.op_input()\n if opcode == 4:\n self.op_output()\n if opcode == 5:\n self.op_jump_if_true()\n if opcode == 6:\n self.op_jump_if_false()\n if opcode == 7:\n self.op_less_than()\n if opcode == 8:\n self.op_equal_to()\n if opcode == 9:\n self.op_adjust_relative()",
"def next(self):\n raise NotImplementedError('Subclass must define the next method')",
"def next(self, initial):",
"def test_process_next_order(self):\n order_processor = OrderProcessor()\n order_processor.open_order_sheet('COMP_3522_A4_orders.xlsx')\n next_order = next(order_processor.process_next_order())\n self.assertTrue(self, isinstance(next_order, Order))",
"def next(self):\n return self.read_message()",
"def test_next_visit_not_required(self):\n node = self.create_xml_patient({'Next_Visit': ''})\n payload = self.create_payload([node])\n parse_patient(node, payload)\n self.assertEqual(payload.patients.count(), 1)",
"def next(self):\n lines = []\n query = False\n while 1:\n line = self._uhandle.readline()\n if not line:\n break\n # If I've reached the next one, then put the line back and stop.\n if lines and (line.startswith('BLAST')\n or line.startswith('BLAST', 1)\n or line.startswith('<?xml ')):\n self._uhandle.saveline(line)\n break\n # New style files ommit the BLAST line to mark a new query:\n if line.startswith(\"Query=\"):\n if not query:\n if not self._header:\n self._header = lines[:]\n query = True\n else:\n #Start of another record\n self._uhandle.saveline(line)\n break\n lines.append(line)\n\n if query and \"BLAST\" not in lines[0]:\n #Cheat and re-insert the header\n #print \"-\"*50\n #print \"\".join(self._header)\n #print \"-\"*50\n #print \"\".join(lines)\n #print \"-\"*50\n lines = self._header + lines\n \n if not lines:\n return None\n \n data = ''.join(lines)\n if self._parser is not None:\n return self._parser.parse(File.StringHandle(data))\n return data",
"def Next(self):\n ret = libxml2mod.xmlTextReaderNext(self._o)\n return ret",
"def hasNext(self) -> bool:\n return self.elements != []",
"def _next(self):\n i = 0\n while i < self.size:\n if self.data[i] != None:\n yield self.data[i]\n i += 1",
"def next(self):\r\n return self.__next",
"def has_next(self):\n regf = self.first_hbin().parent()\n if regf.hbins_size() + regf.first_hbin_offset() == self._offset_next_hbin:\n return False\n\n try:\n self.next()\n return True\n except (ParseException, struct.error):\n return False",
"def _next(self, filename):\n try:\n return self.tmp_read[filename]['reader'].__next__()\n except StopIteration:\n return None",
"def test_next_reads_all_thoughts(user_info, thought):\n parser = ProtobufSampleParser()\n b = BytesIO(b\"\".join(_build_message_buffer(x) for x in (user_info, thought, thought)))\n out = parser.next(b)\n assert parser.user == user_info\n while out:\n assert out.snapshot == thought\n out = parser.next(b)",
"def next(self):\n return self.__next",
"def _parse_line(self, line):\n\n line_nr = self._current_line_nr.__next__()\n\n self.logger.debug('processing line_nr:{}'.format(line_nr))\n\n try:\n\n element_id = re.compile(\"^<([a-zA-Z0-9]+) *\").search(line).group(1)\n attributes_start_pos = len(element_id)+1\n attributes_end_pos = line.find(\">\")\n attributes = line[attributes_start_pos:attributes_end_pos]\n\n args = {\n 'name': element_id,\n 'id': self._current_item_id.__next__(),\n 'line_nr': line_nr,\n 'parent_id': self._get_last_unclosed_element_id()\n }\n\n self._current_element = Element(**args)\n self._parse_attributes(attributes)\n self._elements.append(self._current_element)\n\n line = line[attributes_end_pos+1:]\n\n except AttributeError:\n element_id = None\n\n try:\n\n end_tag = re.compile(\"</(.+)>$\").search(line).group(1)\n last_element_id = self._get_last_unclosed_element_id()\n\n self.get_element_by_id(last_element_id).set_line_end(line_nr)\n\n self.logger.debug('last_element_id:{} line_nr:{}'.format(\n last_element_id,\n line_nr\n )\n )\n\n len_end_tag = len(end_tag)+3\n line = line[:-len_end_tag]\n\n except AttributeError:\n end_tag = None\n\n if element_id and end_tag and len(line) > 0:\n self._current_element.add_content(line)",
"def parse_element(self, node):\n if node.tag not in self.tags:\n self.error(\"Invalid element <%s> \" % node.tag, node)\n return None # won't be able to parse this one\n return getattr(self, \"parse_\" + node.tag)(node, self.tags[node.tag])",
"def _read(self, in_file):\n in_file.read(18) # pad bytes\n self.numelem = int(in_file.read(12))\n in_file.read(37) # pad bytes\n self.format = int(in_file.read(1))\n in_file.read(1) # eol\n self.elems = []\n\n for _ in range(self.numelem):\n elem = FRDElem()\n self.elems.append(elem)\n if self.format < 2:\n in_file.read(1)\n elem.key = int(in_file.read(2))\n elem.number = int(in_file.read(5*(self.format+1)))\n elem.type = int(in_file.read(5))\n elem.group = int(in_file.read(5))\n elem.material = int(in_file.read(5))\n in_file.read(1) # eol\n elem.nodes = []\n num_nodes = FRDElem.nodesPerType[elem.type]\n num_lines = int(num_nodes/(5*(3-self.format)+1))+1\n for j in range(num_lines):\n in_file.read(3) # pad byte and key = -2\n k_start = j*5*(3-self.format)\n k_end = min(num_nodes - k_start, (j+1)*5*(3-self.format))\n for _ in range(0, k_end):\n elem.nodes.append(\n int(in_file.read(5*(self.format+1))))\n in_file.read(1) # eol\n else:\n elem.number = struct.unpack('i', in_file.read(4))[0]\n elem.type = struct.unpack('i', in_file.read(4))[0]\n num_nodes = FRDElem.nodesPerType[elem.type]\n elem.group = struct.unpack('i', in_file.read(4))[0]\n elem.material = struct.unpack('i', in_file.read(4))[0]\n elem.nodes = struct.unpack(\n 'i'*num_nodes, in_file.read(num_nodes*4))\n\n if self.format < 2:\n in_file.readline() # last record for ascii only",
"def _readMoreXML(self,xmlNode):\n pass",
"def __next__(self):\n raise NotImplementedError(\"next() not implemented!\")",
"def next(self):\n return self.my_next",
"def next(some_list, current_index):\n try:\n return some_list[int(current_index) + 1] # access the next element\n except:\n return '' # return empty string in case of exception",
"def __next__(self):\n if not self._current_node:\n raise StopIteration()\n element = self._current_node._next._element\n self._current_node = self._current_node._next\n return element",
"def nextValues(self):\n return list(i.nextLine[self.idx] for i in self if not i.isFinished)\n\n #def isFinished(self):\n \"\"\"When all the data is read.\"\"\"\n #pass\n\n #def getInitialValue(self):\n \"\"\"Returns the initial alignment value.\"\"\"\n #pass\n\n #def newCurrentValue(self):\n \"\"\"Returns the next alignment value.\"\"\"\n #pass\n\n #def align(self, currentValue):\n \"\"\"Process all the elements of self to make them aligned.\"\"\"\n #pass",
"def next(self):\n self.jumpahead(1)",
"def _next(self):\n node = self.head\n while node != None:\n yield node.data\n node = node.right",
"def hasNext(self) -> bool:\n ...",
"def __next__(self):\n if not self._current_node:\n raise StopIteration()\n element = self._current_node._element\n self._current_node = self._current_node._next\n return element",
"def _next(self):\n i = 0\n while i < self.size:\n yield self.data[i]\n i += 1",
"def __parse_next(self, buffer):\n\t\ttoken = buffer.read(1)\n\t\t\n\t\t_tell = buffer.tell()\n\t\t# Is it an operator?\n\t\tif token == \"/\":\n\t\t\tnum, var = self.__parse_operator(buffer)\n\t\t\tif num is None:\n\t\t\t\tbuffer.seek(_tell - 1)\n\t\t\t\treturn \"$\"\n\t\t\t\n\t\t\tif isinstance(var, str):\n\t\t\t\treturn var\n\t\t\t\n\t\t\tret = (var / num)\n\t\t\tif isinstance(ret, Range):\n\t\t\t\tret = ret.min # XXX is this right?\n\t\t\tif int(ret) != ret:\n\t\t\t\treturn \"%.1f\" % ret\n\t\t\treturn str(int(ret))\n\t\t\n\t\tif token == \"*\":\n\t\t\tnum, var = self.__parse_operator(buffer)\n\t\t\tret = var * num\n\t\t\tif isinstance(ret, float):\n\t\t\t\tret = int(round(ret))\n\t\t\treturn str(ret)\n\t\t\n\t\t# Is it a conditional?\n\t\tif token == \"?\":\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tblocks = self.__parse_conditional(buffer)\n\t\t\t\n\t\t\t# Prepare the condition cache\n\t\t\t# This shouldn't be done here, but anyway...\n\t\t\tfor condition, value in blocks:\n\t\t\t\tcondition.evaluate({})\n\t\t\t\tself.conditions.extend(condition.identifiers)\n\t\t\t\n\t\t\t# blocks is a list of (condition, value) tuples\n\t\t\t# We evaluate the paperdoll against each of them\n\t\t\t# and return when we get a hit\n\t\t\t\n\t\t\tfor condition, value in blocks:\n\t\t\t\tif condition.evaluate(self.paperdoll):\n\t\t\t\t\treturn value\n\t\t\t\n\t\t\treturn\n\t\t\n\t\tif token == \"<\":\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tidentifier = self.__read_block(buffer, startchr=\"<\", endchr=\">\")\n\t\t\ttry:\n\t\t\t\tvalue = self.get_variable(identifier)\n\t\t\t\treturn SpellString(value).format(self.obj, proxy=self.proxy)\n\t\t\texcept VariableNotFound:\n\t\t\t\treturn \"<%s>\" % (identifier)\n\t\t\n\t\tif token == \"{\":\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tblock = self.__read_block(buffer, startchr=\"{\", endchr=\"}\")\n\t\t\t\n\t\t\t# Attempt to read decimals formatting\n\t\t\tdecimals = 0\n\t\t\ttoken = buffer.read(1)\n\t\t\tif token == \".\":\n\t\t\t\tdecimals = self.__read_number(buffer)\n\t\t\telif token:\n\t\t\t\t# Step one char back, only if we are not at the end\n\t\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\t\n\t\t\tblock = SpellString(block).format(self.obj, proxy=self.proxy, braced=True)\n\t\t\ttry: # FIXME\n\t\t\t\tblock = eval(block)\n\t\t\t\tif decimals:\n\t\t\t\t\tblock = round(block, decimals)\n\t\t\t\treturn \"%g\" % (block)\n\t\t\texcept Exception:\n\t\t\t\treturn \"[%s]\" % (block)\n\t\t\n\t\t# At this point, we need to check for functions and variables\n\t\t# but only if we don't already have a digit\n\t\tif not token.isdigit():\n\t\t\t_tell = buffer.tell()\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tidentifier = self.__read_alpha(buffer)\n\t\t\t\n\t\t\tif identifier.lower() in FUNCTIONS:\n\t\t\t\targs = self.__parse_function_args(buffer)\n\t\t\t\treturn self.formatter.format_function(identifier, args)\n\t\t\t\n\t\t\tif identifier.lower() in PAPERDOLL_VALUES:\n\t\t\t\treturn self.formatter.format_paperdoll(identifier)\n\t\t\t\n\t\t\t\n\t\t\t# We didn't find any valid identifier\n\t\t\tif not identifier:\n\t\t\t\treturn \"$\"\n\t\t\t\n\t\t\t# Nothing left to check for but booleans\n\t\t\t# The values get messed with the identifier however, so we need to\n\t\t\t# look at only the first char\n\t\t\tif identifier[0] in BOOLEANS:\n\t\t\t\tidentifier = identifier[0]\n\t\t\t\tbuffer.seek(_tell)\n\t\t\t\tvalues = self.__parse_boolean(buffer)\n\t\t\t\treturn self.formatter.format_boolean(token, values)\n\t\t\n\t\t# It's probably a variable then\n\t\tbuffer.seek(-1, SEEK_CUR)\n\t\tspell, 
identifier, effect = self.__parse_macro(buffer)\n\t\t\n\t\tif identifier:\n\t\t\tspell = int(spell or 0)\n\t\t\teffect = int(effect or 1)\n\t\t\t\n\t\t\tvalue = self.formatter.format_macro(spell, identifier, effect)\n\t\t\tself.formatter.last_value = value\n\t\t\treturn str(value)\n\t\telse:\n\t\t\treturn \"$\"\n\t\t\n\t\tif not token or token.isspace():\n\t\t\treturn token\n\t\t\n\t\treturn token",
"def next(self):\n return self.__next__()",
"def next(self):\n return self.__next__()",
"def next(self):\n return self.__next__()",
"def next(self):\n return self.__next__()",
"def next(self):\n return self.__next__()",
"def next(self):\n return self.__next__()",
"def next(self):\n return self.__next__()",
"def __init__(self,\n source_function=sys.stdin.readline,\n finished_function=lambda x: len(x()) == 0):\n self.source_function = source_function\n self.finished_function = finished_function\n self.next_element = None",
"def next(self):\n if not self.tokens:\n return None\n else:\n return self.tokens[0]",
"def _parse(self):\n pass",
"def next(self, in_op):\n raise NotImplementedError",
"def traverse_next(page, next, results):\n for link in page.extract_links(next['follow_link']):\n print(Back.YELLOW + Fore.BLUE + \"Loading page \", link.url + Back.RESET + Fore.RESET)\n r = results.copy()\n for attribute in next['scraping'].get('data'):\n if attribute['field'] != \"\":\n print(\"\\nExtracting\", attribute['field'], \"attribute\", sep=' ')\n r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])\n if not next['scraping'].get('next'):\n yield r\n else:\n for next2 in next['scraping'].get('next'):\n for result in traverse_next(link, next2, r):\n yield result",
"def next(self): # noqa A002\n return bool(self._ll_tree.next())",
"def read_next_event(self) :\n status = HexDataIO.read_next_event(self)\n\n if not status : \n self.event_status = 0 # to break the event loop\n return False\n\n self.proc_event()\n return True",
"def Next():\n return CheckForError(lib.Generators_Get_Next())",
"def parse(self):\n\t\tfirst = None\n\t\tf = open(self.input_file)\n\t\tfor line in f.readlines():\n\t\t\tif line.startswith(\"#\"):\n\t\t\t\tcontinue\n\t\t\ttry:\n\t\t\t\tflow,t,sequence,size = line.split()\n\t\t\texcept:\n\t\t\t\tcontinue\n\t\t\t# append data to a list of tuples\n\t\t\tflow = int(flow)\n\t\t\tt = float(t)\n\t\t\tsequence = int(sequence)\n\t\t\tif size == \"x\":\n\t\t\t\tcontinue\n\t\t\tsize = int(size)\n\t\t\tif not size == 0:\n\t\t\t\tif flow == 1:\n\t\t\t\t\tself.data1.append((t,sequence,size))\n\t\t\t\telif flow == 2:\n\t\t\t\t\tself.data2.append((t,sequence,size))\n\t\t\t\telif flow == 3:\n\t\t\t\t\tself.data3.append((t, sequence, size))\n\t\t\t\telif flow == 4:\n\t\t\t\t\tself.data4.append((t, sequence, size))\n\t\t\t\telif flow == 5:\n\t\t\t\t\tself.data5.append((t, sequence, size))\n\t\t\t\telse:\n\t\t\t\t\tprint \"Erroneous data: \",flow, t, sequence, size\n\t\t\t# Keep track of the minimum and maximum time seen\n\t\t\tif not self.min_time or t < self.min_time:\n\t\t\t\tself.min_time = t\n\t\t\tif not self.max_time or t > self.max_time:\n\t\t\t\tself.max_time = t\n\n\t\t\t# print len(self.data1),len(self.data2),len(self.data3),len(self.data4),len(self.data5)",
"def advance(self):\n self.__token = \"\"\n if self.__i >= len(self.__lines):\n return\n while self.__i < len(self.__lines) and self.__lines[self.__i] in JackTokenizer.redundant: # advance as long as you see redundant chars\n self.__i += 1\n\n if self.__i >= len(self.__lines):\n return\n\n if self.__lines[self.__i] == \"\\\"\":\n self.update()\n while self.__lines[self.__i] != \"\\\"\": # str const\n self.update()\n self.update()\n return\n\n if self.__lines[self.__i].isdigit(): # int const\n while self.__lines[self.__i].isdigit():\n self.update()\n return\n\n if self.__i < (len(self.__lines) - 1) and self.__lines[self.__i:self.__i + 2] == \"//\": # comment\n while self.__i < len(self.__lines) and self.__lines[self.__i] != \"\\n\":\n self.__i += 1\n self.advance()\n return\n\n if self.__i < (len(self.__lines) - 1) and self.__lines[self.__i:self.__i + 2] == \"/*\": # comment\n self.__i += 1\n while self.__lines[self.__i:self.__i + 2] != \"*/\":\n self.__i += 1\n self.__i += 2\n self.advance()\n return\n\n if self.__i < (len(self.__lines) - 2) and self.__lines[self.__i:self.__i + 3] == \"/**\": # comment\n self.__i += 2\n while self.__lines[self.__i:self.__i + 2] != \"*/\":\n self.__i += 1\n self.__i += 2\n self.advance()\n return\n\n if self.__lines[self.__i] in JackTokenizer.symbols: # symbol\n self.update()\n return\n\n else: # other cases\n while self.__lines[self.__i] not in JackTokenizer.symbols and self.__lines[self.__i] not in \" \\t\\r\\n\":\n self.update()",
"def test():\n from pylada.dftcrystal.functional import Functional\n from pylada.dftcrystal import Crystal\n from pylada.dftcrystal.parse import parse\n parsed = parse(string)\n structure = Crystal()\n structure.read_input(parsed['rutile']['CRYSTAL'])\n a = Functional()\n a.read_input(parsed)\n assert a.scfdir \n assert a.maxcycle == 300\n assert a.exchsize == 6937578\n # need structure otherwise parse can't find beginning of input.\n otherstring = a.print_input(structure=structure)\n otherparsed = parse(otherstring)\n b = Functional()\n b.read_input(otherparsed)\n assert otherstring == b.print_input(structure=structure)",
"def __init__(self, iterator):\n self.iterator = iterator\n self.has_next = False\n self.next_val = None\n if self.iterator.hasNext():\n self.has_next = True\n self.next_val = self.iterator.next()",
"def get_next(self):\n raise NotImplementedError(\"Iterator.get_next()\")",
"def next_ele(self):\n\t\ttry:\n\t\t\tret = self._queue.get(block=True, timeout=0.5)\n\t\t\tself._queue.task_done()\n\t\t\treturn ret\n\t\texcept queue.Empty:\n\t\t\tif not self.isAlive():\n\t\t\t\traise\n\t\t\telse:\n\t\t\t\treturn None",
"def elnext(self, e: int) -> int:\n result = self._read_inline(f\"elnext({e})\")\n return int(result)",
"def has_next(self):\n # type: () -> bool\n return len(self.buffer) > 0",
"def next(self):\n if not self.has_next():\n raise StopIteration()\n if self.next_element is not None:\n result = self.next_element\n self.next_element = None\n else:\n result = self.source_function()\n return result",
"def _get_next_lexeme(self):\r\n\r\n if self._read_stack_index > 0: # Take lexeme from the stack at certain position\r\n if self._read_stack_index > len(self.lexeme_stack):\r\n self._read_stack_index = len(self.lexeme_stack) - 1\r\n lexeme = self.lexeme_stack[-self._read_stack_index]\r\n\r\n self._no_read = True\r\n self._read_stack_index -= 1\r\n elif self._no_read:\r\n lexeme = self._lexeme_buffer\r\n self._no_read = False\r\n else:\r\n lexeme = self.read_lexeme()\r\n\r\n return lexeme",
"def get_next(node):\n return node['next']",
"def test_get_next_record_flex():\n reader = csvReader.CsvReader()\n reader.column_count = 6\n reader.expected_headers = ['a', 'b', 'c', None, None, None]\n reader.flex_headers = [None, None, None, 'flex_d', 'flex_e', None]\n reader.csv_reader = csv.reader(['A,\"B\\n\",C,D,E,F'], dialect='excel', delimiter=',')\n return_dict, flex_fields = reader.get_next_record()\n assert return_dict == {'a': 'A', 'b': 'B\\n', 'c': 'C'}\n assert len(flex_fields) == 2\n assert flex_fields[0].header == 'flex_d'\n assert flex_fields[0].cell == 'D'\n assert flex_fields[1].header == 'flex_e'\n assert flex_fields[1].cell == 'E'",
"def next(self):\n if self.current and self.current.next:\n self.current = self.current.next\n return True\n return False",
"def advance(self):\n if self.instr is not None:\n self.instr.opcode = self.instr.binary[25:]\n if opcode_decode[self.instr.opcode] == 'R-type':\n self.decode_rtype()\n elif opcode_decode[self.instr.opcode] == 'I-type' or opcode_decode[self.instr.opcode] == 'Load':\n self.decode_itype()\n else:\n raise SyntaxError(\"Invalid opcode\")",
"def _load_next_file(self):\n\n if self._file_ptr == len(self.files):\n raise pipeline.PipelineStopIteration\n\n # Collect garbage to remove any prior data objects\n gc.collect()\n\n # Fetch and remove the next item in the list\n file_ = self.files[self._file_ptr]\n self._file_ptr += 1\n\n # Set up a Reader class\n rd = self._acqtype_reader[self.acqtype](file_)\n\n self.log.info(f\"Reading file {self._file_ptr} of {len(self.files)}. ({file_})\")\n data = rd.read()\n\n return data",
"def next(self):\n return self._next",
"def nextline(self):\n try:\n self.aLine = self.curFile.next()\n except:\n self.aLine = \"\"\n if( self.aLine == \"\" ): # We've reached the end of the file\n # Open the next one\n if self.nextfile():\n try:\n self.aLine = self.curFile.next()\n except:\n self.aLine = \"\"\n if self.aLine == \"\":\n print \"WrapperTruth: WARNING %s is empty. Bailing out.\"%self.wrapperList[self.fileIdx]\n return \"\"\n else:\n return \"\"\n\n # Now we have a valid file and a line is loaded\n return self.aLine",
"def on_parse(\n self,\n ) -> AsyncIteratorOrIterator[None]: # pragma: no cover # pyright: ignore\n yield None",
"def next(self):\n\n if not self.isFinished:\n if not self.isNearlyFinished: # the next line is still defined\n self.currentLine = self.nextLine.copy()\n # Update nextLine\n try:\n self.nextLine = self.it.next()\n except StopIteration:\n self.isNearlyFinished = True\n else:\n self.isFinished = True\n self.currentLine[self.idxEvals] = numpy.nan\n #TODO: the line above was not valid for the MultiArrayReader\n\n return self.currentLine",
"def _next_record(self, next_line):\n record = self.loader.parse_record_stream(self.reader,\n next_line,\n self.known_format)\n\n self.member_info = None\n\n # Track known format for faster parsing of other records\n self.known_format = record.format\n\n return record",
"def continue_from(element: Element) -> Callable[[str], XMLParseError]:\n\n def error(message: str) -> XMLParseError:\n return XMLParseError(element.file, element.opening_line, message)\n\n return error",
"def next(self) -> str:\n raise NotImplementedError",
"def next_ele(self):\n\t\ttry:\n\t\t\tret = self._queue.get(block = True, timeout=0.5)\n\t\t\tself._queue.task_done()\n\t\t\treturn ret\n\t\texcept queue.Empty:\n\t\t\tif not self.is_running():\n\t\t\t\traise\n\t\t\telse:\n\t\t\t\treturn None",
"def __next__(self):\n handle = self.handle\n\n if self._header is None:\n line = handle.readline()\n else:\n # Header we saved from when we were parsing\n # the previous alignment.\n line = self._header\n self._header = None\n\n if not line:\n raise StopIteration\n\n while line.rstrip() != \"#=======================================\":\n line = handle.readline()\n if not line:\n raise StopIteration\n\n length_of_seqs = None\n number_of_seqs = None\n ids = []\n header_dict = {}\n\n while line[0] == \"#\":\n # Read in the rest of this alignment header,\n # try and discover the number of records expected\n # and their length\n parts = line[1:].split(\":\", 1)\n key = parts[0].lower().strip()\n if key == \"aligned_sequences\":\n number_of_seqs = int(parts[1].strip())\n assert len(ids) == 0\n # Should now expect the record identifiers...\n for i in range(number_of_seqs):\n line = handle.readline()\n parts = line[1:].strip().split(\":\", 1)\n assert i + 1 == int(parts[0].strip())\n ids.append(parts[1].strip())\n assert len(ids) == number_of_seqs\n if key == \"length\":\n length_of_seqs = int(parts[1].strip())\n\n # Parse the rest of the header\n if key == \"identity\":\n header_dict[\"identity\"] = int(parts[1].strip().split(\"/\")[0])\n if key == \"similarity\":\n header_dict[\"similarity\"] = int(parts[1].strip().split(\"/\")[0])\n if key == \"gaps\":\n header_dict[\"gaps\"] = int(parts[1].strip().split(\"/\")[0])\n if key == \"score\":\n header_dict[\"score\"] = float(parts[1].strip())\n\n # And read in another line...\n line = handle.readline()\n\n if number_of_seqs is None:\n raise ValueError(\"Number of sequences missing!\")\n if length_of_seqs is None:\n raise ValueError(\"Length of sequences missing!\")\n\n if (\n self.records_per_alignment is not None\n and self.records_per_alignment != number_of_seqs\n ):\n raise ValueError(\n \"Found %i records in this alignment, told to expect %i\"\n % (number_of_seqs, self.records_per_alignment)\n )\n\n seqs = [\"\" for id in ids]\n seq_starts = []\n index = 0\n\n # Parse the seqs\n while line:\n if len(line) > 21:\n id_start = line[:21].strip().split(None, 1)\n seq_end = line[21:].strip().split(None, 1)\n if len(id_start) == 2 and len(seq_end) == 2:\n # identifier, seq start position, seq, seq end position\n # (an aligned seq is broken up into multiple lines)\n id, start = id_start\n seq, end = seq_end\n if start >= end:\n # Special case, either a single letter is present,\n # or no letters at all.\n if seq.replace(\"-\", \"\") == \"\":\n start = int(start)\n end = int(end)\n else:\n start = int(start) - 1\n end = int(end)\n else:\n assert seq.replace(\"-\", \"\") != \"\", repr(line)\n start = int(start) - 1 # python counting\n end = int(end)\n\n if index < 0 or index >= number_of_seqs:\n raise ValueError(\n \"Expected index %i in range [0,%i)\"\n % (index, number_of_seqs)\n )\n # The identifier is truncated...\n assert id == ids[index] or id == ids[index][: len(id)]\n\n if len(seq_starts) == index:\n # Record the start\n seq_starts.append(start)\n\n # Check the start...\n if start >= end:\n assert seq.replace(\"-\", \"\") == \"\", line\n elif start - seq_starts[index] != len(seqs[index].replace(\"-\", \"\")):\n raise ValueError(\n \"Found %i chars so far for sequence %i (%s, %r), line says start %i:\\n%s\"\n % (\n len(seqs[index].replace(\"-\", \"\")),\n index,\n id,\n seqs[index],\n start,\n line,\n )\n )\n seqs[index] += seq\n\n # Check the end ...\n if end != seq_starts[index] + len(seqs[index].replace(\"-\", \"\")):\n raise ValueError(\n 
\"Found %i chars so far for sequence %i (%s, %r, start=%i), file says end %i:\\n%s\"\n % (\n len(seqs[index].replace(\"-\", \"\")),\n index,\n id,\n seqs[index],\n seq_starts[index],\n end,\n line,\n )\n )\n\n index += 1\n if index >= number_of_seqs:\n index = 0\n else:\n # just a start value, this is just alignment annotation (?)\n # print \"Skipping: \" + line.rstrip()\n pass\n elif line.strip() == \"\":\n # Just a spacer?\n pass\n else:\n raise ValueError(\"Unrecognised EMBOSS pairwise line: %r\\n\" % line)\n\n line = handle.readline()\n if (\n line.rstrip() == \"#---------------------------------------\"\n or line.rstrip() == \"#=======================================\"\n ):\n # End of alignment\n self._header = line\n break\n\n assert index == 0\n\n if (\n self.records_per_alignment is not None\n and self.records_per_alignment != len(ids)\n ):\n raise ValueError(\n \"Found %i records in this alignment, told to expect %i\"\n % (len(ids), self.records_per_alignment)\n )\n\n records = []\n for id, seq in zip(ids, seqs):\n if len(seq) != length_of_seqs:\n # EMBOSS 2.9.0 is known to use spaces instead of minus signs\n # for leading gaps, and thus fails to parse. This old version\n # is still used as of Dec 2008 behind the EBI SOAP webservice:\n # http://www.ebi.ac.uk/Tools/webservices/wsdl/WSEmboss.wsdl\n raise ValueError(\n \"Error parsing alignment - sequences of \"\n \"different length? You could be using an \"\n \"old version of EMBOSS.\"\n )\n records.append(SeqRecord(Seq(seq), id=id, description=id))\n return MultipleSeqAlignment(records, annotations=header_dict)",
"def _read_next_alignment(self, stream):",
"def __next__(self):\n\t\treturn next()",
"def next(self):\n return self.filenum(), self.linenum(), self.tos().next()",
"def _parse(self, infile):\n raise NotImplementedError()",
"def hasNext(self) -> bool:\n return self.pointer < len(self.ordered_nodes)",
"def Parse(self):\n for element in self.elements:\n if isinstance(element, Placeholder):\n self._MatchTextPlaceholder(element)\n else:\n self._MatchNode(element)"
] | [
"0.61624813",
"0.61624813",
"0.61127424",
"0.590448",
"0.5778166",
"0.57393956",
"0.5720215",
"0.5717263",
"0.5713561",
"0.5713561",
"0.5713561",
"0.5713561",
"0.55759835",
"0.5551853",
"0.5442391",
"0.542108",
"0.54165864",
"0.5374671",
"0.5373377",
"0.5344817",
"0.53154594",
"0.5302881",
"0.52975273",
"0.52851844",
"0.5272369",
"0.52584726",
"0.52283305",
"0.52087736",
"0.5200053",
"0.5183916",
"0.5143847",
"0.5143403",
"0.51328444",
"0.5117154",
"0.50968456",
"0.5095988",
"0.5094932",
"0.5084316",
"0.50691587",
"0.50397444",
"0.50392765",
"0.50316703",
"0.50312257",
"0.5022625",
"0.5006322",
"0.49975914",
"0.4983952",
"0.49833998",
"0.4978071",
"0.497597",
"0.49754828",
"0.49619544",
"0.49542388",
"0.49433443",
"0.49418497",
"0.4925944",
"0.4925944",
"0.4925944",
"0.4925944",
"0.4925944",
"0.4925944",
"0.4925944",
"0.49232522",
"0.49037567",
"0.49013987",
"0.48988864",
"0.48903573",
"0.48768175",
"0.48753807",
"0.4872318",
"0.48673907",
"0.48579454",
"0.48577672",
"0.484324",
"0.48383304",
"0.4838274",
"0.48348853",
"0.48339468",
"0.48339054",
"0.48308092",
"0.48284954",
"0.48238197",
"0.48225456",
"0.4813988",
"0.48122203",
"0.48102728",
"0.48028862",
"0.48014826",
"0.47998434",
"0.47935402",
"0.47900057",
"0.47894487",
"0.4779296",
"0.47681513",
"0.47599798",
"0.47580963",
"0.4754973",
"0.47483253",
"0.47482032",
"0.4745634"
] | 0.55927616 | 12 |
Create or Delete an Override. Since we have the concept of a default for any given preference, we only need to store items that are different from the default. So when we 'update' a preference, we first check whether the value is equal to the default and either drop the request on the floor or remove the custom override we have set. | def update(
cls,
auth_token: str,
site_url: str,
key: str,
value: Any,
) -> Preference:
user_id = cls.get_user_id(site_url=site_url, token=auth_token)
site_default = SiteController.default_for_key(site_url=site_url, key=key)
# First we need to check that the value is the correct type and
# determine if we need to add/remove an override for the value.
default_for_site = site_default.serialize()
kind = site_default.kind
try:
serialize_custom_value = site_default.serialize(value)
except Exception:
raise ValueError(f'Expected {kind} for key {key}')
reset_to_default = (default_for_site == serialize_custom_value)
if reset_to_default:
Override.objects.filter(
site__pk=site_url,
user_id=user_id,
key=key,
).delete()
return site_default.to_preference(user_id=user_id)
site = SiteController.get(site_url=site_url)
override, _created = Override.objects.update_or_create(
site=site,
user_id=user_id,
kind=kind,
key=key,
defaults={'value': serialize_custom_value}
)
return override.to_preference() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def override_dict(d, **overrides):\n to_delete = []\n to_restore = {}\n try:\n for k, v in overrides.items():\n if k in d:\n to_restore[k] = d[k]\n else:\n to_delete.append(k)\n if v is None:\n d.pop(k, None) # Delete key k, tolerating it being already gone\n else:\n d[k] = v\n yield\n finally:\n for k in to_delete:\n d.pop(k, None) # Delete key k, tolerating it being already gone\n for k, v in to_restore.items():\n d[k] = v",
"def process_overrides(self, db, dest, kvargs, lines):\n logging.info(\"process_overrides db:{} dest:{} kvargs:{} \".format(db.name,dest,kvargs))\n keyword = kvargs['keyword']\n db.create_overrides(keyword)\n return True",
"def set_override(self, name, override, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info['override'] = self._get_enforced_type_value(\n opt_info['opt'], override)\n opt_info['location'] = LocationInfo(\n Locations.set_override,\n _get_caller_detail(3), # this function has a decorator to skip\n )",
"def override(record: Dict, overrides: Dict):\n for key in overrides.keys():\n if key not in record:\n record[key] = overrides[key]\n else:\n existing_value = record[key]\n overwrite_value = overrides[key]\n if isinstance(existing_value, dict):\n override(existing_value, overwrite_value)\n else:\n record[key] = overwrite_value",
"def makeOverrides(self):\n\t\tself.overridesWithValues = self.dataOverrides",
"def set_default_binding_overrides(self, overrides):\n self.__default_binding_overrides = overrides or {}",
"def add_over(self, override: 'Item') -> None:\n # Copy over all_conf always.\n self.all_conf = lazy_conf.concat(self.all_conf, override.all_conf)\n\n self.folders.update(override.folders)\n\n for ver_id, version in override.versions.items():\n if ver_id not in self.versions:\n # We don't have that version!\n self.versions[ver_id] = version\n else:\n our_ver = self.versions[ver_id]\n for sty_id, style in version.styles.items():\n if sty_id not in our_ver.styles:\n # We don't have that style!\n our_ver.styles[sty_id] = style\n our_ver.inherit_kind[sty_id] = version.inherit_kind[sty_id]\n else:\n raise ValueError(\n 'Two definitions for item folder {}.{}.{}',\n self.id,\n ver_id,\n sty_id,\n )\n # our_style.override_from_folder(style)",
"def _override(config, overrides):\n for key, value in overrides.iteritems():\n if key not in config:\n raise KeyError(\"Unrecognized parameter: %s\" % key)\n config[key] = value",
"def update_overrides(self, app, name, namespace,\n flag='reset', override_values=None):\n if override_values is None:\n override_values = {}\n body = {'flag': flag, 'values': override_values, 'attributes': {}}\n return self._update(self._path(app) +\n '?name=' + name +\n '&namespace=' + namespace, body)",
"def add_override_flags(parser):\n override_group = parser.add_mutually_exclusive_group(required=False)\n override_group.add_argument('--override', action='store_true', dest='override',\n help='Allow overriding values in input file with values from CLI arguments. '\n 'Overriding values is disallowed by default. '\n 'Adding the --no-override flag explicitly disallows overriding values.')\n override_group.add_argument('--no-override', action='store_false', dest='override', help=argparse.SUPPRESS)",
"def appdefs_overrides(self, appdef_id, override_id, data, tenant_id=None, api_version=\"v2.3\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/appdefs/{}/overrides/{}\".format(api_version,\n tenant_id,\n appdef_id,\n override_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)",
"def withOverrides(overrides):",
"def set_override(self, charger, override_time,\n energy_at_plugin, energy_to_add):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"set_override\",\n \"token\": charger.token(),\n \"account_token\": self.api_token,\n \"override_time\": override_time,\n \"energy_at_plugin\": energy_at_plugin,\n \"energy_to_add\": energy_to_add\n }\n\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_api_secure\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n return response_json",
"def create_default(cls):\n raise NotImplementedError(common.OVERRIDE_MESSAGE)",
"def add_override_argument(parser, *names, **kwargs):\r\n if not names:\r\n names = DEFAULT_OVERRIDE_OPTION_NAMES\r\n dest = kwargs.pop('dest', None)\r\n required = kwargs.pop('required', False)\r\n help = kwargs.pop('help', 'extra overrides to apply to the config')\r\n if kwargs:\r\n raise TypeError('add_override_argument() got an invalid keyword argument: %s' %\r\n list(kwargs)[0])\r\n\r\n ov_container = ConfigContainer()\r\n ov_container.get_metadata().is_override_set = True\r\n parser.add_argument(\r\n *names,\r\n dest=dest,\r\n default=ov_container,\r\n required=required,\r\n action=_add_to_override_set,\r\n type=_dict_from_string,\r\n help=help\r\n )",
"def make_override(recipe, override_dir):\n print \"Making override for %s\" % recipe\n command = [\"/usr/local/bin/autopkg\", \"make-override\", recipe]\n if override_dir:\n command.insert(2, \"--override-dir=%s\" %\n os.path.realpath(override_dir))\n # autopkg will offer to search for missing recipes, and wait for\n # input. Therefore, we use a short timeout to just skip any\n # recipes that are (probably) hung up on the prompt.\n proc = Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n try:\n output, error = proc.communicate(timeout=3)\n except TimeoutError:\n print_error(\n \"\\tPlease ensure you have the recipe file for %s.\" % recipe)\n return None\n\n failure_string = \"An override plist already exists at\"\n if failure_string in error:\n print_error(\"\\t\" + error.strip())\n return None\n\n return output[output.find(\"/\"):].strip()",
"def apply_dict_overrides(dictionary: dict, **overrides) -> dict:\n # I'm not entirely sure the treatment of None is the right thing. Need to look into that.\n # Then again, if None were stored, then apply_dict_overrides(d, var1=1, var2=2, var3=None)\n # would be no different than (dict(d, var1=1, var2=2, var3=None). It might be more useful\n # and/or interesting if it would actually remove the key instead. -kmp 18-Jul-2020\n for k, v in overrides.items():\n if v is not None:\n dictionary[k] = v\n # This function works by side effect, but getting back the changed dict may be sometimes useful.\n return dictionary",
"def add_over(self, override: ItemConfig) -> None:\n self.all_conf = lazy_conf.concat(self.all_conf, override.all_conf)\n\n for vers_id, styles in override.versions.items():\n our_styles = self.versions.setdefault(vers_id, {})\n for sty_id, style in styles.items():\n if sty_id not in our_styles:\n our_styles[sty_id] = style\n else:\n our_styles[sty_id] = lazy_conf.concat(our_styles[sty_id], style)",
"def _unset_defaults_and_overrides(self):\n for info, group in self._all_opt_infos():\n info.pop('default', None)\n info.pop('override', None)",
"def backup_overrides(overrides, action='debug'):\n backup_path = os.path.join(BACKUP_PATH, action)\n if not os.path.exists(backup_path):\n os.makedirs(backup_path)\n field = 'user_overrides'\n for chart in overrides:\n name = chart['name']\n if name not in oidc_charts:\n LOG.warning(\"oidc-auth-apps: mismatch chart name '%s'\", name)\n if chart[field]:\n document = yaml.safe_load(chart[field])\n if not document:\n LOG.debug(\"oidc-auth-apps: %s empty document\", name)\n continue\n backup_f = '_'.join([name, field])\n backup_f = '.'.join([backup_f, 'yaml'])\n backup_f = os.path.join(backup_path, backup_f)\n try:\n with open(backup_f, 'w') as file:\n yaml.dump(document, file, default_flow_style=False)\n except IOError as e:\n LOG.error(\"oidc-auth-apps: IOError: %s; file: %s\", e, backup_f)\n return 1\n LOG.info(\"oidc-auth-apps: user_overrides backed up to %s\", backup_path)\n return 0",
"def apply_current_or_orig_values(override, current_version, args):\n keys = args.keys\n if current_version:\n print \"\\tUsing metadata values from {} version {}.\".format(\n current_version[\"name\"], current_version[\"version\"])\n for key in keys:\n current_val = current_version.get(key)\n if current_val:\n override[\"Input\"][\"pkginfo\"][key] = current_val\n else:\n default = override[\"Input_Original\"].get(\n \"pkginfo\", {}).get(key, \"\")\n choice = \"\"\n if not args.no_prompt:\n print \"\\tNo current '%s' value found to apply.\" % key\n print \"\\tRecipe specifies: '%s'\" % default\n choice = raw_input(\"\\tHit enter to use the recipe value, or \"\n \"enter a new value: \")\n override[\"Input\"][\"pkginfo\"][key] = (\n default if choice == \"\" else choice)",
"def _overrides(self, tense, overrides, attr_name,persons=None): \n if not hasattr(self, attr_name):\n self_overrides = [ None ] * len(Tense)\n setattr(self, attr_name, self_overrides) \n else:\n self_overrides = getattr(self, attr_name)\n \n if tense in Tense.Person_Agnostic():\n if isinstance(overrides, str) or self_overrides[tense] is None:\n self_overrides[tense] = [ overrides ]\n else:\n self_overrides[tense].append(overrides)\n return\n \n if persons is None:\n _persons = Person\n elif isinstance(persons, int):\n _persons = [ persons ]\n elif isinstance(persons, list):\n _persons = persons\n else:\n self.__raise(\"persons must be None, integer or list of integers\", tense)\n \n if self_overrides[tense] is None:\n self_overrides[tense] = [None] * len(Person)\n \n if isinstance(overrides, str) or inspect.isfunction(overrides) or inspect.ismethod(overrides): \n for person in _persons:\n if isinstance(overrides, str) or self_overrides[tense][person] is None:\n # if a hard replacement (string), previous overrides are discarded because they will be replaced.\n # or this is the first override\n self_overrides[tense][person] = [overrides]\n else:\n self_overrides[tense][person].append(overrides) \n \n elif isinstance(overrides, list):\n for person, override in enumerate(overrides):\n if override is not None:\n if isinstance(override, str) or self_overrides[tense][person] is None:\n # if a hard replacement (string), previous overrides are discarded because they will be replaced.\n # or this is the first override\n self_overrides[tense][person] = [override]\n else:\n self_overrides[tense][person].append(override)",
"def create_default_options(sender, **kwargs):\n from options.models import Option\n\n for key, data in six.iteritems(DEFAULT_OPTIONS):\n if not Option.objects.filter(name=key).exists():\n try:\n Option.objects.create(name=key, **data)\n except IntegrityError:\n logger.warning(\"Option '%s' already installed\" % key)",
"def override_options(\n config: DictLike,\n selected_options: Tuple[Any, ...],\n set_of_possible_options: Tuple[Iterable[Tuple[str, Any]], ...],\n config_containing_override: Optional[DictLike] = None,\n) -> DictLike:\n if config_containing_override is None:\n config_containing_override = config\n override_opts = config_containing_override.pop(\"override\")\n override_dict = determine_override_options(selected_options, override_opts, set_of_possible_options)\n logger.debug(f\"override_dict: {override_dict}\")\n\n # Set the configuration values to those specified in the override options\n # Cannot just use update() on config because we need to maintain the anchors.\n for k, v in override_dict.items():\n # Check if key is there and if it is not None! (The second part is important)\n if k in config:\n try:\n # If it has an anchor, we know that we want to preserve the type. So we check for the anchor\n # by trying to access it (Note that we don't actually care what the value is - just that it\n # exists). If it fails with an AttributeError, then we know we can just assign the value. If it\n # has an anchor, then we want to preserve the anchor information.\n config[k].anchor\n logger.debug(f\"type: {type(config[k])}, k: {k}\")\n if isinstance(config[k], list):\n # Clear out the existing list entries\n del config[k][:]\n if isinstance(override_dict[k], (str, int, float, bool)):\n # We have to treat str carefully because it is an iterable, but it will be expanded as\n # individual characters if it's treated the same as a list, which is not the desired\n # behavior! If we wrap it in [], then it will be treated as the only entry in the list\n # NOTE: We also treat the basic types this way because they will be passed this way if\n # overriding indirectly with anchors (since the basic scalar types don't yet support\n # reassignment while maintaining their anchors).\n config[k].append(override_dict[k])\n else:\n # Here we just assign all entries of the list to all entries of override_dict[k]\n config[k].extend(override_dict[k])\n elif isinstance(config[k], dict):\n # Clear out the existing entries because we are trying to replace everything\n # Then we can simply update the dict with our new values\n config[k].clear()\n config[k].update(override_dict[k])\n elif isinstance(config[k], (int, float, bool)):\n # This isn't really very good (since we lose information), but there's nothing that can be done\n # about it at the moment (Dec 2018)\n logger.debug(\"Overwriting YAML anchor object. It is currently unclear how to reassign this value.\")\n config[k] = v\n else:\n # Raise a value error on all of the cases that we aren't already aware of.\n raise ValueError(\n f\"Object {k} (type {type(config[k])}) somehow has an anchor, but is something other than a list or dict. Attempting to assign directly to it.\"\n )\n except AttributeError:\n # If no anchor, just overwrite the value at this key\n config[k] = v\n else:\n raise KeyError(k, f'Trying to override key \"{k}\" that it is not in the config.')\n\n return config",
"def storeUnloadedDefaultsOnly( self ):\n \n unStoredKeys= [ aKey \n for aKey in self._defDict.keys() \n if aKey not in self._loadedDefaults ]\n if len( unStoredKeys ) == 0:\n return\n \n # keep track of what has been loaded\n [ self._loadedDefaults.append( aKey ) for aKey in unStoredKeys ]\n \n # get the data \n data= [ self._defDict[ aKey ] for aKey in unStoredKeys ] \n \n # loading only unloaded\n tempDict= self._prefObj.load( group= self.prefGroup, \\\n name= unStoredKeys, default= data )\n \n # add if already not a field\n addDict= { aKey.split(\"/\")[-1]: tempDict[aKey] \n if aKey not in self.__dict__ \n else warnings.warn( \"\\\"\" + aKey + \"\\\" is already stored in the data, \" + \\\n \"Will not updated this field with unstored default\" )\n for aKey in tempDict }\n \n self.__dict__.update( addDict )",
"def defaults_provider():\n return getattr(defaults_provider, 'overrides', {})",
"def override_paramset(self, override_str):\n\n paramset = ParamSet()\n if not override_str:\n return paramset\n\n override = eval(override_str, {}, {})\n if not override:\n return paramset\n\n for override_name in override:\n # The override can have a node_name/parm format which allows for point\n # instance overrides to override parms in a network.\n\n cached_override = self.override_cache.get(override_name, None)\n if cached_override is not None:\n # Hint to just skip\n if cached_override == -1:\n continue\n if isinstance(cached_override, PBRTParam):\n # textures which can't be overriden\n paramset.add(cached_override)\n continue\n pbrt_name, pbrt_type, tuple_names = cached_override\n if tuple_names:\n value = [override[x] for x in tuple_names]\n else:\n value = override[override_name]\n pbrt_param = PBRTParam(pbrt_type, pbrt_name, value)\n paramset.add(pbrt_param)\n continue\n\n override_match = self.override_pat.match(override_name)\n spectrum_type = override_match.group(\"spectrum\")\n parm_name = override_match.group(\"parm\")\n override_node = override_match.group(\"node\")\n if override_node is not None and override_node != self.name:\n self.override_cache[override_name] = -1\n continue\n\n # There can be two style of \"overrides\" one is a straight parm override\n # which is similar to what Houdini does. The other style of override is\n # for the spectrum type parms. Since spectrum parms can be of different\n # types and the Material Overrides only support \"rgb\" we are limited\n # in the types of spectrum overrides we can do. To work around this we'll\n # support a different style, override_parm:spectrum_type. If the parm name\n # ends in one of the \"rgb/color\" types then we'll handle it differently.\n # TODO add a comment as to what the value would look like\n\n # NOTE: The material SOP will use a parm style dictionary if there\n # parm name matches exactly\n # ie) if there is a color parm you will get\n # {'colorb':0.372511,'colorg':0.642467,'colorr':0.632117,}\n # But if the parm name doesn't match (which we are allowing\n # for you will get something like this -\n # {'colora':(0.632117,0.642467,0.372511),}\n\n # Once we have a parm name, we need to determine what \"style\" it is.\n # Whether its a hou.ParmTuple or hou.Parm style.\n tuple_names = tuple()\n parm_tuple = self.node.parmTuple(parm_name)\n if parm_tuple is None:\n # We couldn't find a tuple of that name, so let's try a parm\n parm = self.node.parm(parm_name)\n if parm is None:\n # Nope, not valid either, let's move along\n self.override_cache[override_name] = -1\n continue\n # if its a parm but not a parmtuple it must be a split.\n parm_tuple = parm.tuple()\n # we need to \"combine\" these and process them all at once and\n # then skip any other occurances. The skipping is handled by\n # the overall caching mechanism. 
self.override_cache\n tuple_names = tuple([x.name() for x in parm_tuple])\n\n # This is for wrangling parm names of texture nodes due to having a\n # signature parm.\n pbrt_parm_name = self.pbrt_parm_name(parm_tuple.name())\n\n if spectrum_type is None and tuple_names:\n # This is a \"traditional\" override, no spectrum or node name prefix\n value = [override[x] for x in tuple_names]\n pbrt_param = self._hou_parm_to_pbrt_param(\n parm_tuple, pbrt_parm_name, value\n )\n elif spectrum_type in (\"spectrum\", \"xyz\", \"blackbody\"):\n pbrt_param = PBRTParam(\n spectrum_type, pbrt_parm_name, override[override_name]\n )\n elif not tuple_names:\n pbrt_param = self._hou_parm_to_pbrt_param(\n parm_tuple, pbrt_parm_name, override[override_name]\n )\n else:\n raise ValueError(\"Unable to wrangle override name: %s\" % override_name)\n\n paramset.add(pbrt_param)\n\n # From here to the end of the loop is to allow for caching\n\n if pbrt_param.type == \"texture\":\n self.override_cache[override_name] = pbrt_param\n continue\n\n # we are making an assumption a split parm will never be a spectrum\n # or have a node prefix. The Material SOP doesn't allow for it as well.\n for name in tuple_names:\n # The -1 means \"continue\"\n self.override_cache[name] = -1\n # Sanity check\n if tuple_names and override_name not in tuple_names:\n raise ValueError(\n \"Override name: %s, not valid for a parmTuple\" % override_name\n )\n # override_name must match one of the tuple_names\n self.override_cache[override_name] = (\n pbrt_param.name,\n pbrt_param.param_type,\n tuple_names,\n )\n return paramset",
"def clear_override(self, name, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info.pop('override', None)",
"def get_flag_with_override(self, field_name, override_value):\n publication = self.get_parent_object_of_type(\"Publication\")\n current = self.context\n value = not override_value\n if publication is not None:\n while current != publication.aq_parent:\n schema = current.Schema()\n if field_name in schema:\n field = schema.get(field_name)\n value = field.get(current)\n if value is override_value:\n break\n current = current.aq_parent\n return value",
"def overrides(self, overrides: ConfigNodePropertyArray):\n\n self._overrides = overrides",
"def addOverride(cls, override):\n assert cls not in locationTypeOverrides_\n assert not isinstance(override, tuple)\n\n locationTypeOverrides_[cls] = override\n locationTypes_[override] = cls",
"def setdefault(pb_or_dict, key, value):\n if not get(pb_or_dict, key, default=None):\n set(pb_or_dict, key, value)",
"def default(self, name, new=None, erase=False):\n # Check existence\n if name not in self._defaults:\n raise tools.UnavailableException(self._defaults, name, what=\"model default\")\n # Get current\n old = self._defaults[name]\n # Set if needed\n if erase or new is not None:\n self._defaults[name] = new\n # Return current/old\n return old",
"def test_overwrite(self):\n set_default_for_missing_keys('hello world')\n set_default_for_missing_keys(123, overwrite=True)\n\n assert DotWizPlus().missing_key == 123",
"def conditional_overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GoogleCloudChannelV1ConditionalOverrideArgs']]]]:\n return pulumi.get(self, \"conditional_overrides\")",
"def override_default_kwargs(**overrides):\n def decorator(function):\n @functools.wraps(function)\n def replacement(*args, **kwargs):\n # Allow our default kwargs to be overriden if specified\n final_kwargs = copy.deepcopy(overrides)\n final_kwargs.update(**kwargs)\n return function(*args, **final_kwargs)\n return replacement\n return decorator",
"def isOverride(self) -> bool:\n ...",
"def testOverrideOfOneItem(self):\n\t\tc = Controller()\n\t\tx = KlassBeingMocked()\n\t\tx.f = 38\n\t\tc.override(x, 'f', 5)\n\t\tself.failUnless(x.f == 5)\n\t\tc.restore()\n\t\tself.failUnless(x.f == 38)",
"def delete_overrides(self, app, name, namespace):\n return self._delete(self._path(app) +\n '?name=' + name +\n '&namespace=' + namespace)",
"def override(overridden):\n def decorator(overriding):\n overridden.override(overriding)\n return overriding\n return decorator",
"def file_override(filename, *args, **kwargs):\n return self.new_file_override()",
"def convert_overrides(overrides, conn):\n LOG.info(\"oidc-auth-apps: converting dex overrides\")\n if not validate_overrides(overrides):\n return 1\n new_doc = merge_new_overrides()\n res = backup_overrides(overrides, action='migrate')\n if res != 0:\n return res\n # replace the dex user overrides\n new_str = yaml.dump(new_doc, default_flow_style=False)\n for override in overrides:\n if override['name'] == 'dex':\n override['user_overrides'] = new_str\n res = backup_overrides(overrides, action='converted')\n return res",
"def ensure_exists(dictionary, key, default_value=None):\n if key not in dictionary:\n dictionary[key] = copy.deepcopy(default_value)",
"def parm_values(overrides):\n\n originals = []\n try:\n for parm, value in overrides:\n originals.append((parm, parm.eval()))\n parm.set(value)\n yield\n finally:\n for parm, value in originals:\n # Parameter might not exist anymore so first\n # check whether it's still valid\n if hou.parm(parm.path()):\n parm.set(value)",
"def process_conjugation_override(self, conjugation_override): \n if conjugation_override is None:\n return True\n elif isinstance(conjugation_override, ConjugationOverride):\n override = conjugation_override \n elif len(conjugation_override) > 1:\n doNotApply = conjugation_override[0] == '-'\n self._explicit_no_root_verb = conjugation_override[0] == '!' \n if self.explicit_no_root_verb:\n # we don't do anything with this right now .. but in future note this in the output\n self.base_verb_string = conjugation_override\n return True\n if doNotApply: \n # we continue processing to make sure the override name was correct.\n lookup_key = conjugation_override[1:]\n else:\n lookup_key = conjugation_override\n if lookup_key in Standard_Overrides:\n override = Standard_Overrides[lookup_key]\n elif lookup_key in Dependent_Standard_Overrides:\n override = Dependent_Standard_Overrides[lookup_key]\n else:\n self.__raise(lookup_key+\": override is not one of \"+repr(list(Standard_Overrides.keys()))+\" or \"+repr(list(Dependent_Standard_Overrides.keys())))\n# if override is None:\n# self.__raise(\"no override with key \", lookup_key)\n if doNotApply:\n self.add_doNotApply(override.key)\n return False\n else:\n #No override or blank\n return False\n if self.canApply(override):\n if override.key:\n # Custom overrides do not always have keys\n self.add_applied_override(override.key)\n override.apply(self)\n return True\n else:\n return False",
"def override(app_list, override_list):\n\n for command in override_list:\n app_name = command[0]\n\n # Find if app_name is a valid app to override.\n class_obj = next((i for i in app_list if i.__name__ == app_name),\n default=None)\n\n if not class_obj:\n print('Could not override for app {}'.format(app_name))\n continue\n\n if command[1] != '_':\n new_args = literal_eval(command[1])\n if isinstance(new_args, tuple):\n ARGS[app_name] = new_args\n else:\n print('Malformed args for app {}'.format(app_name))\n\n if command[2] != '_':\n try:\n new_kwargs = json.loads(command[2])\n KWARGS[app_name] = new_kwargs\n except json.JSONDecodeError:\n print('Malformed kwargs for app {}'.format(app_name))",
"def shrinkage_overrides(self, shrinkage_overrides):\n \n self._shrinkage_overrides = shrinkage_overrides",
"def determine_override_options(\n selected_options: Tuple[Any, ...],\n override_opts: DictLike,\n set_of_possible_options: Optional[Tuple[Iterable[Tuple[str, Any]], ...]] = None,\n) -> Dict[str, Any]:\n # Validation\n if set_of_possible_options is None:\n set_of_possible_options = ()\n\n override_dict: Dict[str, Any] = {}\n for option in override_opts:\n # We need to cast the option to a string to effectively compare to the selected option,\n # since only some of the options will already be strings\n if str(option) in list(map(lambda opt: str(opt), selected_options)):\n override_dict.update(\n determine_override_options(selected_options, override_opts[option], set_of_possible_options)\n )\n else:\n logger.debug(f\"override_opts: {override_opts}\")\n # Look for whether the key is one of the possible but unselected options.\n # If so, we haven't selected it for this analysis, and therefore they should be ignored.\n # NOTE: We compare both the names and value because sometimes the name is not sufficient,\n # such as in the case of the energy (because a number is not allowed to be a field name.)\n found_as_possible_option = False\n for possible_options in set_of_possible_options:\n # Same type of comparison as above, but for all possible options instead of the selected\n # options.\n if str(option) in list(map(lambda opt: str(opt), possible_options)):\n found_as_possible_option = True\n # Below is more or less equivalent to the above (although .str() hides the details or\n # whether we should compare to the name or the value in the enum and only compares against\n # the designated value).\n # for possible_opt in possible_options:\n # if possible_opt.name == option or possible_opt.value == option:\n # found_as_possible_option = True\n\n if not found_as_possible_option:\n # Store the override value, since it doesn't correspond with a selected option or a possible\n # option and therefore must be an option that we want to override.\n logger.debug(f'Storing override option \"{option}\", with value \"{override_opts[option]}\"')\n override_dict[option] = override_opts[option]\n else:\n logger.debug(f'Found option \"{option}\" as possible option, so skipping!')\n\n return override_dict",
"def resetStoredDefaults( self ):\n keys= list( self._defDict.keys() )\n data= [ self._defDict[ aKey ] for aKey in keys ]\n \n self.prefObj.save( group= self.prefGroup, name= keys, data= data )\n self.resetSelfWithDefaults()",
"def _default(self, section, option, default):\r\n if not self.has_section(section):\r\n self.add_section(section)\r\n if not self.has_option(section, option):\r\n self.set(section, option, default)\r\n self.save()",
"def apply_overrides(self, parent_values):\n raise NotImplementedError(\n \"{} does not have implemented `apply_overrides`\".format(self)\n )",
"def can_override_user(request):\n if not hasattr(request, \"can_override_user\"):\n request.can_override_user = can_override()\n return request.can_override_user",
"def testOverrideOfTwoItems(self):\n\t\tc = Controller()\n\t\tx = KlassBeingMocked()\n\t\ty = KlassBeingMocked()\n\t\tx.f = 38\n\t\ty.g = 39\n\t\tc.override(x, 'f', 5)\n\t\tc.override(y, 'g', 6)\n\t\tself.failUnless(x.f == 5)\n\t\tself.failUnless(y.g == 6)\n\t\tc.restore()\n\t\tself.failUnless(x.f == 38)\n\t\tself.failUnless(y.g == 39)",
"def __init__(__self__, *,\n override_settings: Optional[pulumi.Input['UpgradeOverrideSettingsArgs']] = None):\n if override_settings is not None:\n pulumi.set(__self__, \"override_settings\", override_settings)",
"def save_defaults(self, overwrite=False):\r\n for (section, option), value in self.defaults.iteritems():\r\n if value is None:\r\n continue\r\n if section not in self.__config:\r\n self.__config[section] = {}\r\n if overwrite or option not in self.__config[section]:\r\n self.__config[section][option] = value\r\n self.save()",
"def process_overrides(recipes, args, production_cat, pkginfo_template):\n for recipe in recipes:\n print SEPARATOR\n\n if recipe in RECIPE_EXCLUSIONS:\n print_error(\"Not overriding %s because it is in the list of \"\n \"exclusions.\" % recipe)\n continue\n if recipe.startswith(\"local\"):\n print_error(\"Not overriding %s because it _is_ an override.\" %\n recipe)\n continue\n\n override_path = make_override(recipe, args.override_dir)\n if override_path is None:\n continue\n\n # Copy just-generated override's Input section to Input_Original.\n override = FoundationPlist.readPlist(override_path)\n override[\"Input_Original\"] = override[\"Input\"]\n override[\"Input\"] = {}\n override[\"Input\"][\"pkginfo\"] = {}\n\n current_version = get_current_production_version(\n production_cat, override, args)\n apply_current_or_orig_values(override, current_version, args)\n\n if not args.suppress_subdir:\n copy_package_path_to_input(override, current_version, args)\n\n if pkginfo_template:\n apply_pkginfo_template(override, pkginfo_template)\n\n FoundationPlist.writePlist(override, override_path)",
"def apply_overrides(manifest_json, overrides, marketplace):\n manifest_json = copy.deepcopy(manifest_json)\n if overrides is None:\n return manifest_json\n\n if \"title\" in overrides:\n manifest_json[\"info\"][\"title\"] = overrides[\"title\"]\n if \"description\" in overrides:\n manifest_json[\"info\"][\"description\"] = overrides[\"description\"]\n if \"price\" in overrides:\n invalid_price_format = \"Price should be a non-negative integer.\"\n try:\n price = int(overrides[\"price\"])\n manifest_json[\"info\"][\"x-21-total-price\"][\"min\"] = price\n manifest_json[\"info\"][\"x-21-total-price\"][\"max\"] = price\n if price < 0:\n raise exceptions.ValidationError(invalid_price_format)\n except ValueError:\n raise exceptions.ValidationError(invalid_price_format)\n if \"name\" in overrides:\n manifest_json[\"info\"][\"contact\"][\"name\"] = overrides[\"name\"]\n if \"email\" in overrides:\n manifest_json[\"info\"][\"contact\"][\"email\"] = overrides[\"email\"]\n if \"host\" in overrides:\n manifest_json[\"host\"] = overrides[\"host\"]\n if \"port\" in overrides:\n host = manifest_json[\"host\"]\n # if the host is in the form of https://x.com/ remove the trailing slash\n host = host.strip(\"/\")\n invalid_port_format = \"Port should be an integer between 0 and 65536.\"\n try:\n port = int(overrides[\"port\"])\n if port <= 0 or port > 65536:\n raise exceptions.ValidationError(invalid_port_format)\n except ValueError:\n raise exceptions.ValidationError(invalid_port_format)\n host += \":{}\".format(port)\n manifest_json[\"host\"] = host\n if \"basePath\" in overrides:\n manifest_json[\"basePath\"] = overrides[\"basePath\"]\n\n return manifest_json",
"def testSetDefaultExists(self):\n ref = cache.CacheReference(self.cache, 'key')\n self.cache._KeyExists.return_value = True\n ref.SetDefault('/foo')\n self.assertFalse(self.cache._Insert.called)",
"def patch_ini_file(defaultfile, customfile, PATCH=True, EXACT=False):\n default_options = SafeConfigParser()\n result = default_options.read(defaultfile) # returns an empty list if file error\n if result == []:\n raise IOError\n\n personalized_options = SafeConfigParser()\n result = personalized_options.read(customfile)\n if result == []:\n raise IOError\n\n personalized_options_have_been_modified = False\n personalized_and_default_are_same = True\n\n #iterate through default file and compare customfile for existance of each field\n for section_name in default_options.sections():\n if personalized_options.has_section(section_name) == False:\n personalized_options.add_section(section_name) #create section name in customfile\n personalized_options_have_been_modified = True\n personalized_and_default_are_same = False\n for option_name in default_options.options(section_name):\n defaultvalue = default_options.get(section_name, option_name)\n if personalized_options.has_option(section_name, option_name) == False:\n personalized_options.set(section_name, option_name, defaultvalue) #create name/value pair in customfile\n personalized_options_have_been_modified = True\n personalized_and_default_are_same = False\n else:\n if personalized_options.get(section_name, option_name) != defaultvalue:\n personalized_and_default_are_same = False #keep track of dissimilarities in files\n\n # local(in memory) copy of customfile now contains all sections and options of defaultfile.\n if personalized_options_have_been_modified and PATCH:\n with open(customfile, \"wb\") as config_file:\n if EXACT: #with PATCH and EXACT we discard the old customfile and replace with defaultfile\n default_options.write(config_file)\n else: #otherwise with PATCH only we write the patched version of customfile to storage\n personalized_options.write(config_file)\n\n #for purposes of comparison of files regardless of patching return True/False\n if EXACT:\n return filecompare(defaultfile, customfile)\n else:\n return personalized_and_default_are_same",
"def override(self, default: Optional[str] = None) -> Optional[str]:\n return self.type_override if self.type_override else default",
"def inject_overrides(self, overrides):\n for run in self.benchmarks:\n _ = [run.update_spec(key, value) for key, value in overrides.items()]",
"def user_data_override(self) -> Optional[str]:\n return pulumi.get(self, \"user_data_override\")",
"def update_with_defaults(**kwargs):\n # Update the defaults with the input values\n with open(DEFAULTS, \"r\") as f:\n defaults = json.load(f)\n return _update(kwargs, defaults)",
"def update(self, option_old, option_new=\"\"):\n if option_old == option_new:\n return\n self.pile_list.remove(option_old)\n if option_new != \"\":\n self.pile_list.append(option_new)",
"def set_default_templates(sender, **kwargs):\n profile = kwargs[\"instance\"]\n if kwargs[\"created\"]:\n if (profile.expungement_petition_template is None and \n ExpungementPetitionTemplate.objects.filter(default__exact=True).count() == 1):\n profile.expungement_petition_template = (ExpungementPetitionTemplate\n .objects\n .filter(default__exact=True)\n .all()[0])\n if (profile.sealing_petition_template is None and \n SealingPetitionTemplate.objects.filter(default__exact=True).count() == 1):\n profile.sealing_petition_template = (SealingPetitionTemplate\n .objects\n .filter(default__exact=True)\n .all()[0])\n\n profile.save()",
"def menuSetPref(*args, exists: bool=True, force: bool=True, loadAll: bool=True, removeAll:\n bool=True, saveAll: bool=True, saveBackup: bool=True, version: bool=True,\n q=True, query=True, e=True, edit=True, **kwargs)->Union[None, Any]:\n pass",
"def update_with_defaults(self, default_values: dict):\n updates = []\n for key, value in default_values.items():\n for item in self._collect.find({key: {\"$exists\": False}}, {'_id': True}):\n updates.append(pymongo.UpdateOne(item, {\"$set\": {key: value}}))\n\n if len(updates):\n print(\"Update:\", self._collect.bulk_write(updates).modified_count)",
"def addCustomEffect(self, effect, override):\n effectId = effect.getType().getId()\n existing = None\n for mobEffect in getHandle().effects:\n if MobEffectList.getId(mobEffect.getMobEffect()) == effectId:\n existing = mobEffect\n if existing != None:\n if not override:\n return False\n self.getHandle().effects.remove(existing)\n self.getHandle().a(CraftPotionUtil.fromBukkit(effect))\n self.getHandle().refreshEffects()\n return True",
"def test_apply_overrides() -> None:\n m = ParamClass()\n overrides = {\"name\": \"newName\", \"int_tuple\": (0, 1, 2)}\n actual_overrides = m.apply_overrides(overrides)\n assert actual_overrides == overrides\n assert all([x == i and isinstance(x, int) for i, x in enumerate(m.int_tuple)])\n assert m.name == \"newName\"\n # Attempt to change seed and constant, but the latter should be ignored.\n change_seed = {\"seed\": 123}\n old_constant = m.constant\n changes2 = m.apply_overrides({**change_seed, \"constant\": \"Nothing\"})\n assert changes2 == change_seed\n assert m.seed == 123\n assert m.constant == old_constant",
"async def test_cache_miss_used_default_overwrites_voice(self):\n self.cog.previous_overwrites.get.return_value = None\n\n await self.cog._unsilence(self.voice_channel)\n self.voice_channel.set_permissions.assert_awaited_once_with(\n self.cog._verified_voice_role,\n overwrite=self.voice_overwrite,\n )\n\n self.assertIsNone(self.voice_overwrite.connect)\n self.assertIsNone(self.voice_overwrite.speak)",
"def addDefaultShareID(store, shareID, priority):\n _DefaultShareID(store=store, shareID=shareID, priority=priority)",
"def ProcessDatasetOverwrite(ref, args, request):\n del ref\n dataset_id = request.dataset.datasetReference.datasetId\n project_id = request.projectId\n\n if args.overwrite:\n if _DatasetExists(dataset_id, project_id):\n _TryDeleteDataset(dataset_id, project_id)\n\n return request",
"def test_database_object_overwrite_parameter_is_set(self):\n database = generate_database_object(overwrite=True)\n\n self.assertEqual(\n True,\n database.overwrite == True,\n \"Database object did not have an overwrite flag, despite being created with one.\"\n )",
"def remove_overrides(self):\n raise NotImplementedError(\n \"{} Method `remove_overrides` not implemented!\".format(\n repr(self)\n )\n )",
"def merge_parameter(base_params, override_params):\n if override_params is None:\n return base_params\n is_dict = isinstance(base_params, dict)\n for k, v in override_params.items():\n if is_dict:\n if k not in base_params:\n raise ValueError('Key \\'%s\\' not found in base parameters.' % k)\n if type(base_params[k]) != type(v) and base_params[k] is not None:\n raise TypeError('Expected \\'%s\\' in override parameters to have type \\'%s\\', but found \\'%s\\'.' %\n (k, type(base_params[k]), type(v)))\n base_params[k] = v\n else:\n if not hasattr(base_params, k):\n raise ValueError('Key \\'%s\\' not found in base parameters.' % k)\n if type(getattr(base_params, k)) != type(v) and getattr(base_params, k) is not None:\n raise TypeError('Expected \\'%s\\' in override parameters to have type \\'%s\\', but found \\'%s\\'.' %\n (k, type(getattr(base_params, k)), type(v)))\n setattr(base_params, k, v)\n return base_params",
"def get_chart_override(overrides, chart):\n chart_ov = None\n for chart_ov in overrides:\n if 'name' in chart_ov and chart_ov['name'] == chart:\n break\n else:\n chart_ov = None\n if not (chart_ov and 'user_overrides' in chart_ov):\n return None\n if not chart_ov['user_overrides']:\n # A sanity check. Really shouldn't see this if oidc-auth-apps\n # does not have dex overrides - either because the app is not\n # applied, or because it failed to apply without overrides\n return None\n # convert the string to python structures\n return yaml.safe_load(chart_ov['user_overrides'])",
"def add_over(self: CorridorGroup, override: CorridorGroup) -> None:\n for key, corr_over in override.corridors.items():\n try:\n corr_base = self.corridors[key]\n except KeyError:\n self.corridors[key] = corr_over\n else:\n corr_base.extend(corr_over)",
"def remove_default(self):\n if self.default_present:\n self.removeItem(0)\n self.default_present = False",
"def set_default_values_as_needed(self):\n if self.verbose:\n click.echo('Updating required default values')\n for field in ARGUMENTS_DEFAULT_VALUES:\n if self.__class__.__name__ in ARGUMENTS_DEFAULT_VALUES[field][1]:\n self.data[field] = ARGUMENTS_DEFAULT_VALUES[field][0]",
"def test_default_save_strategy_should_be_always(self):\r\n v = DefaultModelLevelSaveStrategy.create(val=1)\r\n assert 'val' in v.as_save_params()\r\n v.val = 2\r\n assert 'val' in v.as_save_params()\r\n v.save()",
"def update_default_from_dict(self, key, value):\n pass",
"def _update_default(self, default_value):\n if self.type == \"uri_folder\" or self.type == \"uri_file\":\n self.default = default_value\n return\n else:\n if isinstance(default_value, float) and not math.isfinite(default_value):\n # Since nan/inf cannot be stored in the backend, just ignore them.\n # logger.warning(\"Float default value %r is not allowed, ignored.\" % default_value)\n return\n \"\"\"Update provided default values.\n Here we need to make sure the type of default value is allowed or it could be parsed..\n \"\"\"\n if default_value is not None and not isinstance(default_value, self._allowed_types):\n try:\n default_value = self._parse(default_value)\n except Exception as e:\n if self.name is None:\n msg = \"Default value of %s Input cannot be parsed, got '%s', type = %s.\" % (\n self.type,\n default_value,\n type(default_value),\n )\n else:\n msg = \"Default value of %s Input '%s' cannot be parsed, got '%s', type = %s.\" % (\n self.type,\n self.name,\n default_value,\n type(default_value),\n )\n raise MldesignerComponentDefiningError(cause=msg) from e\n self.default = default_value",
"def create_defaults(self, request):\n ThemeKeys_obj = ThemeKeys()\n ThemeKeys_obj.create_default_keys()\n\n WebsitePreferenceKeys_obj = WebsitePreferenceKeys()\n WebsitePreferenceKeys_obj.create_default_keys()\n\n context = {\n 'title': \"Creazione chiavi e temi di default\",\n 'opts': self.model._meta,\n 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser,\n 'site_url': '/',\n }\n\n messages.add_message(request, messages.SUCCESS, 'Valori di default creati con successo.')\n\n return render(request, 'admin/custom_view/create_defaults.html', context)",
"def validate_overrides(overrides):\n DEFINES.clear()\n if not overrides:\n # dex without overrides isn't configured correctly\n LOG.error(\"oidc-auth-apps: no overrides to validate\")\n return False\n elif type(overrides) is not list:\n # this shouldn't happen\n LOG.error(\"oidc-auth-apps: overrides not list type\")\n return False\n # Find dex; only dex helm needs conversion\n document = get_chart_override(overrides, 'dex')\n if not document:\n LOG.error(\"oidc-auth-apps: no dex user_overrides to validate\")\n return False\n validate = yaml.safe_load(validation_yaml)\n return validate_document(validate, document)",
"def set_preference(cls, user, preference_key, preference_value):\r\n user_pref, _ = cls.objects.get_or_create(user=user, key=preference_key)\r\n user_pref.value = preference_value\r\n user_pref.save()",
"def __delete__(self, instance):\n raise AttributeError(\"A Default Property cannot be deleted\")",
"def set(self, **kwargs: Any) -> None:\n self._overrides.update(kwargs)",
"def user_overrides(f, overrides=None, *args, **kwargs):\n # problematically i don't really know how to do this.\n\n @functools.wraps\n def _():\n return f(overrides)\n\n return f(*args, **kwargs)",
"def set_pref(self, name, value):\r\n pass",
"def preference(self, key, value):\n self.command(\"preference %(key)s %(value)s\" % locals())",
"def ProcessTableOverwrite(ref, args, request):\n dataset_id = ref.datasetId\n table_id = ref.Name()\n project_id = ref.projectId\n\n if args.overwrite:\n if _TableExists(dataset_id, table_id, project_id):\n _TryDeleteTable(dataset_id, table_id, project_id)\n\n return request",
"def overridable(name, default):\n return getattr(django_settings, name, default)",
"def test_force_override(self):\n DummyLoader.register()\n try:\n DummyLoader.register(override=True)\n except ValueError:\n self.fail('Can not register if passing `override` set to `True`.')",
"def set_default(self, name, default, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info['default'] = self._get_enforced_type_value(\n opt_info['opt'], default)\n opt_info['location'] = LocationInfo(\n Locations.set_default,\n _get_caller_detail(3), # this function has a decorator to skip\n )",
"def is_overridden(self, setting_name):\n return setting_name in self._explicit_settings",
"def replace_platform_variable(name, op):\n if not name in g_platform_variables:\n raise RuntimeError(\"trying to destroy nonexistent platform variable '%s'\" % (name))\n g_platform_variables[name] = { \"default\" : op }",
"def create_or_update_match_settings(\n request: MatchSettingsUpdateRequest,\n ) -> MatchSettingsUpdateResponse:\n if config := AdditionalMatchSettingsConfig.get(request.privacy_group_id):\n config.pdq_match_threshold = request.pdq_match_threshold\n hmaconfig.update_config(config)\n\n return MatchSettingsUpdateResponse(\n f\"match_settings updated for pg_id {request.privacy_group_id} with pdq_match_threshold={request.pdq_match_threshold}\"\n )\n\n config = AdditionalMatchSettingsConfig(\n request.privacy_group_id, request.pdq_match_threshold\n )\n hmaconfig.create_config(config)\n return MatchSettingsUpdateResponse(\n f\"match_settings created for pg_id {request.privacy_group_id} with pdq_match_threshold={request.pdq_match_threshold}\"\n )",
"def tax_override(self, tax_override):\n\n self._tax_override = tax_override",
"def local_override(self, func):\n @functools.wraps(func)\n def inner(*args, **kwargs):\n local_options = {}\n option_overrides = kwargs.get('options', kwargs)\n for k, v in option_overrides.items():\n if k in self.option_fields:\n local_options[k] = kwargs.pop(k)\n\n # Store current options before the local override in memory, so they\n # can be used after the function returns to reset the `obj:Options`\n # instance.\n current_options = deepcopy(self.__dict__)\n\n # Apply overrides and allow the method to run with the overrides\n # applied.\n self.override(local_options)\n result = func(*args, **kwargs)\n\n # Wipe out the locally scoped overrides and reset the state back to\n # what it was before the method was called.\n self.populate(current_options)\n return result\n\n return inner",
"def save(self, file_name_override=None):\n self.protocol.commit(file_name_override=file_name_override)"
] | [
"0.5960256",
"0.590297",
"0.5773372",
"0.5702012",
"0.5510236",
"0.5490459",
"0.54790086",
"0.5437721",
"0.5424676",
"0.54007906",
"0.5397106",
"0.52891964",
"0.5250817",
"0.5232657",
"0.5229502",
"0.5227945",
"0.52064884",
"0.5171837",
"0.51337427",
"0.51274",
"0.5075617",
"0.5021524",
"0.50072694",
"0.4985852",
"0.49474332",
"0.49382317",
"0.4937766",
"0.49370363",
"0.49351534",
"0.49166593",
"0.49053195",
"0.490195",
"0.48809764",
"0.48794603",
"0.48727554",
"0.48458636",
"0.48235616",
"0.4812733",
"0.48041478",
"0.47978884",
"0.47941035",
"0.47893974",
"0.47787684",
"0.47745895",
"0.47709656",
"0.47701627",
"0.47472245",
"0.47421175",
"0.47346708",
"0.47322798",
"0.4724948",
"0.47244504",
"0.4715082",
"0.46977168",
"0.46812958",
"0.46643054",
"0.46636632",
"0.46634498",
"0.4648909",
"0.4642647",
"0.46332827",
"0.46292272",
"0.46099183",
"0.4608345",
"0.460416",
"0.4595542",
"0.4594147",
"0.45859686",
"0.45815217",
"0.4567595",
"0.45609805",
"0.4548015",
"0.45402086",
"0.45388737",
"0.4538624",
"0.4537456",
"0.45347226",
"0.45225537",
"0.4511528",
"0.45074624",
"0.45042115",
"0.45033446",
"0.45016742",
"0.44853324",
"0.44826075",
"0.4479421",
"0.44792005",
"0.44788483",
"0.44757473",
"0.4473991",
"0.44673175",
"0.4462635",
"0.44608086",
"0.44442412",
"0.44442347",
"0.4443767",
"0.44412303",
"0.44376135",
"0.4436588",
"0.44363895"
] | 0.53815335 | 11 |
Training script for hyperparameter evaluation of Gradient Boosting model | def lgb_trial(args, reporter):
    try_import_lightgbm()
    import lightgbm as lgb
    # list of args which are not model hyperparameters:
    nonparam_args = set(['directory', 'task_id', 'lgb_model', 'dataset_train_filename', 'dataset_val_filename'])
    trial_id = args.task_id  # Note: may not start at 0 if HPO has been run for other models with the same scheduler
    directory = args.directory
    file_prefix = "trial_" + str(trial_id) + "_"  # appended to all file names created during this trial. Do NOT change!
    lgb_model = args.lgb_model
    lgb_model.params = lgb_model.params.copy()  # ensure no remaining pointers across trials
    for key in args:
        if key not in nonparam_args:
            lgb_model.params[key] = args[key]  # use these hyperparam values in this trial
    dataset_train = lgb.Dataset(directory + args.dataset_train_filename)
    dataset_val_filename = args.get('dataset_val_filename', None)
    if dataset_val_filename is not None:
        dataset_val = lgb.Dataset(directory + dataset_val_filename)
    else:
        dataset_val = None  # explicit default so the `is not None` check below cannot raise a NameError
    eval_metric = lgb_model.get_eval_metric()
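    # Map the problem type to LightGBM's built-in training-loss metric name; these
    # strings must match the keys LightGBM records in evals_result so the callback
    # can look the train loss up by name.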
    if lgb_model.problem_type == BINARY:
        train_loss_name = 'binary_logloss'
    elif lgb_model.problem_type == MULTICLASS:
        train_loss_name = 'multi_logloss'
    elif lgb_model.problem_type == REGRESSION:
        train_loss_name = 'l2'
    else:
        raise ValueError("unknown problem_type for LGBModel: %s" % lgb_model.problem_type)
    lgb_model.eval_results = {}
    callbacks = []
    valid_names = ['train_set']
    valid_sets = [dataset_train]
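    # When a validation set is available, attach the custom HPO callback: it streams
    # per-round results to the reporter and stops training early after stopping_rounds
    # rounds without improvement. The validation set is prepended so 'valid_set' is
    # the series the callback monitors.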
    if dataset_val is not None:
        callbacks += [
            hpo_callback(reporter=reporter, stopping_rounds=150, metrics_to_use=[('valid_set', lgb_model.eval_metric_name)],
                         max_diff=None, ignore_dart_warning=True, verbose=False, train_loss_name=train_loss_name, eval_results=lgb_model.eval_results)
        ]
        valid_names = ['valid_set'] + valid_names
        valid_sets = [dataset_val] + valid_sets
    else:
        raise NotImplementedError("cannot call gbm hyperparameter_tune without validation dataset")
    num_boost_round = lgb_model.params.pop('num_boost_round', 1000)
    seed_value = lgb_model.params.pop('seed_value', None)
    train_params = {
        'params': lgb_model.params.copy(),
        'train_set': dataset_train,
        'num_boost_round': num_boost_round,
        'valid_sets': valid_sets,
        'valid_names': valid_names,
        'evals_result': lgb_model.eval_results,
        'callbacks': callbacks,
        'verbose_eval': -1,
    }
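    # lgb.train() accepts callable custom metrics only through `feval`; string metric
    # names travel inside the params dict instead, so feval is wired up just for callables.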
    if type(eval_metric) != str:
        train_params['feval'] = eval_metric
    if seed_value is not None:
        # lgb.train() has no `seed` keyword argument; the seed must live inside the
        # params dict, and the Python/NumPy RNGs are only seeded when a seed is given.
        train_params['params']['seed'] = seed_value
        random.seed(seed_value)
        np.random.seed(seed_value)
    lgb_model.model = lgb.train(**train_params)
    lgb_model.params['num_boost_round'] = num_boost_round  # re-set this value after training
    if seed_value is not None:
        lgb_model.params['seed_value'] = seed_value
    lgb_model.best_iteration = lgb_model.model.best_iteration
    # TODO: difficult to ensure these iters always match
    # if lgb_model.eval_results['best_iter'] != lgb_model.best_iteration:
    #     raise ValueError('eval_results[best_iter]=%s does not match lgb_model.best_iteration=%s' % (lgb_model.eval_results['best_iter'], lgb_model.best_iteration))
    # print('eval_results[best_iter]=%s does not match lgb_model.best_iteration=%s' % (lgb_model.eval_results['best_iter'], lgb_model.best_iteration))
    trial_model_file = lgb_model.save(file_prefix=file_prefix, directory=directory, return_filename=True)
    reporter(epoch=num_boost_round + 1, validation_performance=lgb_model.eval_results['best_valperf'],
             train_loss=lgb_model.eval_results['best_trainloss'],
             best_iteration=lgb_model.eval_results['best_iter'],
             directory=directory, file_prefix=file_prefix, trial_model_file=trial_model_file)
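    # A minimal sketch of how a scheduler might drive this trial function; the
    # FIFOScheduler name and its arguments below are assumptions for illustration,
    # not part of this module:
    #   scheduler = FIFOScheduler(lgb_trial,
    #                             resource={'num_cpus': 4, 'num_gpus': 0},
    #                             searcher='random', num_trials=10,
    #                             time_attr='epoch', reward_attr='validation_performance')
    #   scheduler.run()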
    # TODO: add to reporter: time_of_trial without load/save time (isn't this just function of early-stopping point?), memory/inference ?? | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def config1() :\n data_name = \"titanic\" ### in data/input/\n model_class = 'AutoML' ### ACTUAL Class name for model_sklearn.py\n n_sample = 1000\n\n def post_process_fun(y): ### After prediction is done\n return int(y)\n\n def pre_process_fun(y): ### Before the prediction is done\n return int(y)\n\n\n model_dict = {'model_pars': {\n ### LightGBM API model #######################################\n 'model_class': model_class\n ,'model_pars' : {\n 'total_time_limit' : 20,\n 'algorithms' : 'auto',\n 'results_path' : root_repo + f'/data/output/{data_name}/{os_get_function_name()}/automl_1',\n 'eval_metric' : 'auto'\n\n # mode='Explain',\n # ml_task='auto', model_time_limit=None, algorithms='auto', train_ensemble=True,\n # stack_models='auto', eval_metric='auto', validation_strategy='auto', explain_level='auto',\n # golden_features='auto', features_selection='auto', start_random_models='auto',\n # hill_climbing_steps='auto', top_models_to_improve='auto', verbose=1, random_state=1234)\n }\n\n , 'post_process_fun' : post_process_fun ### After prediction ##########################################\n , 'pre_process_pars' : {'y_norm_fun' : pre_process_fun , ### Before training ##########################\n\n\n ### Pipeline for data processing ##############################\n 'pipe_list': [\n #### coly target prorcessing\n {'uri': 'source/prepro.py::pd_coly', 'pars': {}, 'cols_family': 'coly', 'cols_out': 'coly', 'type': 'coly' },\n\n\n {'uri': 'source/prepro.py::pd_colnum_bin', 'pars': {}, 'cols_family': 'colnum', 'cols_out': 'colnum_bin', 'type': '' },\n {'uri': 'source/prepro.py::pd_colnum_binto_onehot', 'pars': {}, 'cols_family': 'colnum_bin', 'cols_out': 'colnum_onehot', 'type': '' },\n\n #### catcol INTO integer, colcat into OneHot\n {'uri': 'source/prepro.py::pd_colcat_bin', 'pars': {}, 'cols_family': 'colcat', 'cols_out': 'colcat_bin', 'type': '' },\n # {'uri': 'source/prepro.py::pd_colcat_to_onehot', 'pars': {}, 'cols_family': 'colcat_bin', 'cols_out': 'colcat_onehot', 'type': '' },\n\n\n ### Cross_feat = feat1 X feat2\n # {'uri': 'source/prepro.py::pd_colcross', 'pars': {}, 'cols_family': 'colcross', 'cols_out': 'colcross_pair', 'type': 'cross'},\n\n\n #### Example of Custom processor\n #{'uri': THIS_FILEPATH + '::pd_col_myfun', 'pars': {}, 'cols_family': 'colnum', 'cols_out': 'col_myfun', 'type': '' }, \n\n\n ],\n }\n },\n\n 'compute_pars': { 'metric_list': ['accuracy_score','average_precision_score']\n\n ,'mlflow_pars' : None # {} ### Not empty --> use mlflow\n },\n\n 'data_pars': { 'n_sample' : n_sample,\n\n 'download_pars' : None,\n\n\n 'cols_input_type' : cols_input_type_1,\n ### family of columns for MODEL #########################################################\n # \"colnum\", \"colnum_bin\", \"colnum_onehot\", \"colnum_binmap\", #### Colnum columns\n # \"colcat\", \"colcat_bin\", \"colcat_onehot\", \"colcat_bin_map\", #### colcat columns\n # 'colcross_single_onehot_select', \"colcross_pair_onehot\", 'colcross_pair', #### colcross columns 'coldate', 'coltext',\n 'cols_model_group': [ 'colnum_bin',\n 'colcat_bin',\n # 'coltext',\n # 'coldate',\n #'colcross_pair',\n \n ### example of custom\n # 'col_myfun'\n ]\n\n ### Filter data rows ##################################################################\n ,'filter_pars': { 'ymax' : 2 ,'ymin' : -1 }\n\n }\n }\n\n ##### Filling Global parameters ############################################################\n model_dict = global_pars_update(model_dict, data_name, config_name=os_get_function_name() )\n return model_dict",
"def train_gradient_boost(self, params, num_boost_round = 50):\n print \"training GB......\"\n dtrain = xgb.DMatrix(self.X, self.y)\n model = xgb.train(params, dtrain, num_boost_round = num_boost_round)\n self.models += [model]",
"def __init__(self, in_features, out_features):\n \n ########################\n # PUT YOUR CODE HERE #\n #######################\n mean = 0\n std_dev = 0.0001\n #print(in_features)\n #print(out_features)\n # create weight matrices\n weight = np.random.normal(mean, std_dev, (out_features, in_features))\n #print(weight.shape)\n grad_weight = np.zeros((in_features, out_features))\n\n # create biases (in batches)\n bias = np.zeros(out_features)\n grad_bias = np.zeros(out_features)\n\n self.params = {'weight': weight, 'bias': bias}\n self.grads = {'weight': bias, 'bias': grad_bias}\n\n ########################\n # END OF YOUR CODE #\n #######################",
"def lgb_hyperopt(data, labels, num_evals=1000, n_folds=5, diagnostic=False):\r\n LGBM_MAX_LEAVES = 2**11 #maximum number of leaves per tree for LightGBM\r\n LGBM_MAX_DEPTH = 25 #maximum tree depth for LightGBM \r\n EVAL_METRIC_LGBM_CLASS = 'f1'\r\n\r\n def lgb_f1_score(y_hat, data):\r\n y_true = data.get_label()\r\n y_hat = np.round(y_hat)\r\n return 'f1', f1_score(y_true, y_hat), True\r\n\r\n print('Running {} rounds of LightGBM parameter optimisation:'.format(num_evals))\r\n #clear space\r\n \r\n integer_params = ['max_depth',\r\n 'num_leaves',\r\n 'max_bin',\r\n 'min_data_in_leaf',\r\n 'min_data_in_bin']\r\n \r\n def objective(space_params):\r\n \r\n #cast integer params from float to int\r\n for param in integer_params:\r\n space_params[param] = int(space_params[param])\r\n \r\n #extract nested conditional parameters\r\n if space_params['boosting']['boosting'] == 'goss':\r\n top_rate = space_params['boosting'].get('top_rate')\r\n other_rate = space_params['boosting'].get('other_rate')\r\n #0 <= top_rate + other_rate <= 1\r\n top_rate = max(top_rate, 0)\r\n top_rate = min(top_rate, 0.5)\r\n other_rate = max(other_rate, 0)\r\n other_rate = min(other_rate, 0.5)\r\n space_params['top_rate'] = top_rate\r\n space_params['other_rate'] = other_rate\r\n \r\n subsample = space_params['boosting'].get('subsample', 1.0)\r\n space_params['boosting'] = space_params['boosting']['boosting']\r\n space_params['subsample'] = subsample\r\n \r\n cv_results = lgb.cv(space_params, train, nfold = n_folds, stratified=True,\r\n early_stopping_rounds=100, seed=42, feval=lgb_f1_score)\r\n \r\n best_loss = -cv_results['f1-mean'][-1]\r\n\r\n return{'loss':best_loss, 'status': STATUS_OK }\r\n \r\n train = lgb.Dataset(data, labels)\r\n \r\n #integer and string parameters, used with hp.choice()\r\n boosting_list = [{'boosting': 'gbdt',\r\n 'subsample': hp.uniform('subsample', 0.5, 1)},\r\n {'boosting': 'goss',\r\n 'subsample': 1.0,\r\n 'top_rate': hp.uniform('top_rate', 0, 0.5),\r\n 'other_rate': hp.uniform('other_rate', 0, 0.5)}] #if including 'dart', make sure to set 'n_estimators'\r\n\r\n objective_list_reg = ['huber', 'gamma', 'fair', 'tweedie']\r\n objective_list_class = ['binary', 'cross_entropy']\r\n objective_list = objective_list_class\r\n is_unbalance_list = [True]\r\n\r\n space ={'boosting' : hp.choice('boosting', boosting_list),\r\n 'num_leaves' : hp.quniform('num_leaves', 2, LGBM_MAX_LEAVES, 1),\r\n 'max_depth': hp.quniform('max_depth', 2, LGBM_MAX_DEPTH, 1),\r\n 'max_bin': hp.quniform('max_bin', 32, 255, 1),\r\n 'min_data_in_leaf': hp.quniform('min_data_in_leaf', 1, 256, 1),\r\n 'min_data_in_bin': hp.quniform('min_data_in_bin', 1, 256, 1),\r\n 'min_gain_to_split' : hp.quniform('min_gain_to_split', 0.1, 5, 0.01),\r\n 'lambda_l1' : hp.uniform('lambda_l1', 0, 5),\r\n 'lambda_l2' : hp.uniform('lambda_l2', 0, 5),\r\n 'learning_rate' : hp.loguniform('learning_rate', np.log(0.005), np.log(0.2)),\r\n 'metric' : None, \r\n 'objective' : hp.choice('objective', objective_list),\r\n 'feature_fraction' : hp.quniform('feature_fraction', 0.5, 1, 0.01),\r\n 'bagging_fraction' : hp.quniform('bagging_fraction', 0.5, 1, 0.01),\r\n 'is_unbalance' : hp.choice('is_unbalance', is_unbalance_list)\r\n }\r\n\r\n trials = Trials()\r\n best = fmin(fn=objective,\r\n space=space,\r\n algo=tpe.suggest,\r\n max_evals=num_evals, \r\n trials=trials)\r\n \r\n #fmin() will return the index of values chosen from the lists/arrays in 'space'\r\n #to obtain actual values, index values are used to subset the original lists/arrays\r\n #extract 
nested conditional parameters\r\n try:\r\n if best['boosting']['boosting'] == 'goss':\r\n top_rate = best['boosting'].get('top_rate')\r\n other_rate = best['boosting'].get('other_rate')\r\n #0 <= top_rate + other_rate <= 1\r\n top_rate = max(top_rate, 0)\r\n top_rate = min(top_rate, 0.5)\r\n other_rate = max(other_rate, 0)\r\n other_rate = min(other_rate, 0.5)\r\n best['top_rate'] = top_rate\r\n best['other_rate'] = other_rate\r\n except:\r\n if boosting_list[best['boosting']]['boosting'] == 'goss':\r\n top_rate = best['top_rate']\r\n other_rate = best['other_rate']\r\n #0 <= top_rate + other_rate <= 1\r\n top_rate = max(top_rate, 0)\r\n top_rate = min(top_rate, 0.5)\r\n other_rate = max(other_rate, 0)\r\n other_rate = min(other_rate, 0.5)\r\n best['top_rate'] = top_rate\r\n best['other_rate'] = other_rate\r\n best['boosting'] = boosting_list[best['boosting']]['boosting']#nested dict, index twice\r\n best['metric'] = metric_list[best['metric']]\r\n best['objective'] = objective_list[best['objective']]\r\n best['is_unbalance'] = is_unbalance_list[best['is_unbalance']]\r\n \r\n #cast floats of integer params to int\r\n for param in integer_params:\r\n best[param] = int(best[param])\r\n \r\n print('{' + '\\n'.join('{}: {}'.format(k, v) for k, v in best.items()) + '}')\r\n if diagnostic:\r\n return(best, trials)\r\n else:\r\n return(best)",
"def train_model(X_train, y_train, X_valid, y_valid, params=None, model_type='lgb', \r\n model_path_name='lgb', plot_feature_importance=False, model=None):\r\n def lgb_f1_score(y_true, y_pred):\r\n y_pred = np.round(y_pred)\r\n return 'f1', f1_score(y_true, y_pred), True\r\n\r\n scores = []\r\n feature_importance = pd.DataFrame()\r\n print('Started at', time.ctime())\r\n \r\n \r\n if model_type == 'lgb':\r\n \r\n model = lgb.LGBMClassifier(**params, n_estimators=50000, n_jobs=-1)\r\n model.fit(X_train, y_train, eval_set=(X_valid, y_valid), \r\n eval_metric=lgb_f1_score, early_stopping_rounds=300)\r\n \r\n y_pred_valid = model.predict(X_valid)\r\n \r\n if model_type == 'cat':\r\n model = cb.CatBoost(iterations=20000, **params)\r\n model.fit(X_train, y_train, eval_set=(X_valid, y_valid), cat_features=[], use_best_model=True, verbose=False)\r\n y_pred_valid = model.predict(X_valid)\r\n\r\n #save the model\r\n joblib.dump(model, model_path_name)\r\n \r\n scores.append(f1_score(y_valid, y_pred_valid)) \r\n \r\n if model_type == 'lgb':\r\n # feature importance\r\n fold_importance = pd.DataFrame()\r\n fold_importance[\"feature\"] = X_train.columns\r\n fold_importance[\"importance\"] = model.feature_importances_\r\n feature_importance = pd.concat([feature_importance, fold_importance], axis=0)\r\n \r\n print('score: {0:.4f}.'.format(np.mean(scores)))\r\n\r\n if model_type == 'lgb':\r\n feature_importance[\"importance\"]\r\n if plot_feature_importance:\r\n cols = feature_importance[[\"feature\", \"importance\"]].groupby(\"feature\").mean().sort_values(\r\n by=\"importance\", ascending=False)[:50].index\r\n\r\n best_features = feature_importance.loc[feature_importance.feature.isin(cols)]\r\n\r\n #sns.barplot(x=\"importance\", y=\"feature\", data=best_features.sort_values(by=\"importance\", ascending=False));\r\n \r\n return feature_importance, np.mean(scores)\r\n return np.mean(scores)\r\n \r\n else:\r\n return np.mean(scores)",
"def main():\n \n # The following 5 command lines can be outcommented if the features are already created.\n # There is no need to process the data every single time.\n # Fine tuning the learning algorythm is much faster without that extra step.\n \n # by reading the train dataset the feature index is created.\n # First calling of the processdata function\n # Data limited to 300000\n featureIndexes = processData(os.path.join(dataFolder,\"avito_train.tsv\"), itemsLimit=600000)\n print \"featureIndex generated!\"\n print len(featureIndexes)\n\n # Trainfeature is created using the indexfeatures...\n # Second calling of the processdata function\n trainFeatures, trainTargets, trainItemIds, trainPrices, trainUrls, trainPhones, trainEmails, trainLength = processData(os.path.join(dataFolder,\"avito_train.tsv\"), itemsLimit=600000) # Original itemsLimit=300000\n\n # Building the test dataset... just like the training...\n testFeatures, testItemIds, testPrices, testUrls, testPhones, testEmails, testLength = processData(os.path.join(dataFolder,\"avito_test.tsv\"), featureIndexes)\n\n # Dumping data into file...\n # joblib.dump((trainFeatures, trainTargets, trainItemIds, testFeatures, testItemIds), os.path.join(dataFolder,\"train_data.pkl\"))\n joblib.dump((trainFeatures,trainTargets,trainItemIds,trainPrices,trainUrls,trainPhones,trainEmails,trainLength,\n testFeatures, testItemIds,testPrices,testUrls,testPhones,testEmails,testLength), os.path.join(dataFolder,\"SeparatedByCategory.pkl\"))\n\n\n # loading data pack...\n # trainFeatures, trainTargets, trainItemIds, testFeatures, testItemIds = joblib.load(os.path.join(dataFolder,\"train_data.pkl\"))\n\n #logging.info(\"Feature preparation done, fitting model...\")\n\n # Stochastic gradient model",
"def train(self, trainingData, trainingLabels, validationData, validationLabels ):\n\n self.setWeights(trainingData.shape[1])\n # DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR\n # THE AUTOGRADER WILL LIKELY DEDUCT POINTS.\n \n # Hyper-parameters. Your can reset them. Default batchSize = 100, weight_decay = 1e-3, learningRate = 1e-2\n \"*** YOU CODE HERE ***\"\n self.batchSize = 100\n self.weight_decay = 1e-3\n self.learningRate = 0.1\n\n def Softmax(x):\n x_max = np.max(x, axis=0)\n x_exp = np.exp(x - x_max)\n x_exp_sum = np.sum(x_exp, axis=0)\n return x_exp / x_exp_sum\n\n for iteration in range(self.max_iterations):\n if iteration % 10 == 0:\n print(\"Starting iteration \", iteration, \"...\")\n self.learningRate *= 0.9\n dataBatches = self.prepareDataBatches(trainingData, trainingLabels)\n for batchData, batchLabel in dataBatches:\n \"*** YOUR CODE HERE ***\"\n Y = np.zeros((len(self.legalLabels), self.batchSize))\n for i in range(self.batchSize):\n Y[batchLabel[i]][i] = 1\n Y_pred = Softmax((batchData @ self.weights + self.bias).T)\n d_weight = ((Y_pred - Y) @ batchData / batchData.shape[0]).T + self.weight_decay * sum(self.weights)\n d_bias = np.mean(Y_pred - Y, axis=1) + self.weight_decay * sum(self.bias)\n self.weights -= d_weight * self.learningRate\n self.bias -= d_bias * self.learningRate",
"def train():\n # YOUR TRAINING CODE GOES HERE",
"async def train(gradient_boosting: bool = False) -> bool:\n data = clf.dataset()\n return clf.train(data['X'], data['y'], gradient_boosting)",
"def main():\n\n if len(sys.argv) < 4 or len(sys.argv) > 5:\n print 'Usage: classifier.py data_dimension train_set_path test_set_path [option: add_bias]'; \n return; \n\n # create sets of possible hyperparameter values\n setC = {0.001, 0.01, 0.1, 1, 10, 25, 100}; # trade off regularizer and error minimization\n setRho = {0.001, 0.01, 0.1, 1}; # learning rate for gradient descent \n hyperparams = [setC, setRho];\n \n # create svm classifier for selected data\n dataDim = int(sys.argv[1]);\n trainPath = str(sys.argv[2]);\n testPath = str(sys.argv[3]);\n if len(sys.argv) == 5:\n c = Classifier('svm', hyperparams, dataDim, testPath, trainPath, addBias=True);\n else:\n c = Classifier('svm', hyperparams, dataDim, testPath, trainPath);\n \n print 'Classifier type: ', c.type, \\\n '\\nTraining set: ', trainPath, \\\n '\\nTest set: ', testPath;\n \n print 'Determining hyperparameters to use...';\n c.learnHyperparams(report=1);\n \n print 'Training classifier...';\n c.train();\n \n print 'Performing inference on test set...';\n c.test(); \n \n print '\\nREPORT:', \\\n '\\nUsing hyperparameters: ', c.theta, \\\n '\\nLearned weight vector: ', c.w, \\\n '\\nPrediction accuracy on test set: ', c.accuracy * 100, ' percent';",
"def trainModel( self, featureTrain, classTrain):",
"def train_step(\n config,\n unused_model, # NOT USED\n state,\n unused_opt,\n learning_rate_fn,\n batch,\n rng,\n *unused_args,\n **unused_kwargs,\n):\n step = state.step + 1\n lr = learning_rate_fn(step)\n\n key, rng = jax.random.split(rng)\n rays, pixels = instant_ngp_utils.random_ray_batch(\n key, (config.trainer.per_device_num_rays,), batch\n )\n\n def loss_fn(vox):\n rgb_est, _, _, coarse_den, _, weights, t = instant_ngp_utils.render_rays(\n rays, vox, rng, config\n )\n loss_color_l2 = jnp.mean(jnp.square(rgb_est - pixels))\n loss_color_huber = jnp.mean(huber_loss(rgb_est, pixels))\n loss_distortion = config.trainer.distortion_loss_strength * jnp.mean(\n lossfun_distortion(t, weights)\n )\n loss_density = config.trainer.density_regularization * jnp.mean(\n jnp.square(coarse_den)\n )\n loss = loss_color_huber + loss_density + loss_distortion\n stats = {\n \"loss_color_l2\": loss_color_l2,\n \"loss_color_huber\": loss_color_huber,\n \"loss_density\": loss_density,\n \"loss_distortion\": loss_distortion,\n \"loss\": loss,\n }\n return loss, stats\n\n # Get gradient function, then evaluate it with current parameters\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n (loss, output), grad = grad_fn(state.params)\n if config.get(\"multi\"):\n # Compute average gradient across multiple workers.\n grad = jax.lax.pmean(grad, axis_name=\"batch\")\n state = state.apply_gradients(grads=grad)\n\n mse = output[\"loss_color_l2\"]\n if config.get(\"multi\"):\n grad = jax.lax.pmean(mse, axis_name=\"batch\")\n psnr = image_metrics.compute_psnr(mse=mse)\n if config.get(\"multi\"):\n stats = {k: jax.lax.pmean(v, axis_name=\"batch\") for k, v in output.items()}\n metrics_update = TrainMetrics.gather_from_model_output(\n **stats, learning_rate=lr, psnr=psnr\n )\n else:\n metrics_update = TrainMetrics.single_from_model_output(\n **output, learning_rate=lr, psnr=psnr\n )\n return state, metrics_update, {}",
"def train(features, targets, weights, bias):\n # see gradient_descent for explanation\n epochs = 100\n learning_rate = 0.1\n\n picture_nb = 2\n\n # Print current accuracy. How many people have been classified as sick/healthy correctly?\n predictions = predict(features, weights, bias)\n print(\"Accuracy: \", np.mean(predictions == targets))\n\n for epoch in range(epochs):\n if epoch % 10 == 0:\n # get normalized scores\n predictions = activation(pre_activation(features, weights, bias))\n # compare with targets to see how bad our algorithm is\n print(\"Cost = %s\" % cost(predictions, targets))\n # Replot graph. Check in create_dataset for explanation of parameters\n if picture_nb == 2:\n plt.plot(features[:, 0], (weights[0] * features[:, 0] + bias) / -weights[1], color='red')\n elif picture_nb == 11:\n plt.plot(features[:, 0], (weights[0] * features[:, 0] + bias) / -weights[1], color='green')\n else:\n plt.plot(features[:, 0], (weights[0] * features[:, 0] + bias) / -weights[1], color='orange')\n picture_nb+=1\n\n # Initialize gradients\n # weights_gradients is 2D array with 2 values\n weights_gradients = np.zeros(weights.shape)\n bias_gradient = 0\n # Go through each row\n for feature, target in zip(features, targets):\n # Compute prediction\n z = pre_activation(feature, weights, bias)\n # Get normalized score\n y = activation(z)\n # Update gradients based on formulas established before. Look at gradient_descent to understand what we\n # are doing. Also, the formulas are below, just before the call of the function train.\n weights_gradients += (y - target) * derivative_activation(z) * feature\n # no multiplication of feature because it does not depend on some coordinates.\n bias_gradient += (y - target) * derivative_activation(z)\n\n # Update variables. These are the lines that result the cost to get reduced.\n weights = weights - learning_rate * weights_gradients\n bias = bias - learning_rate * bias_gradient\n\n # Print final accuracy. How many people have been classified as sick/healthy correctly?\n predictions = predict(features, weights, bias)\n print(\"Accuracy: \", np.mean(predictions == targets))\n\n plt.scatter(features[:, 0], features[:, 1], s=40, c=targets, cmap=plt.cm.Spectral)\n plt.savefig(\"DataPointsLineEvolution.png\")\n # legend for understanding\n plt.legend(['Original division', 'New division', 'New division', 'New division', 'New division', 'New division',\n 'New division', 'New division', 'New division', 'Final division'], loc='upper left')\n # save picture of data points drawn.\n plt.savefig(\"DataPointsLineEvolutionLegend.png\")",
"def run_training():\n\tdata_sets, data_w= data_loader.load_laplace(loc=0, scale=1, sample_size=1000, dimension=2, skew=False, whiten=True, rotation=True)\n\n\t# Tell tensorflow that the model will be built into the default gragh\n\twith tf.Graph().as_default():\n\t\t# Generate placeholders for the single image and all training images for objective function\n\t\tinput_placeholder, obj_placeholder = placeholder_inputs(data_sets.shape[0], data_sets.shape[1])\n\n\t\t# Build a graph that computes predictions from the inference model\n\t\toutputs, objs, weights, thresholds = BCM_tf.inference(input_placeholder, obj_placeholder, FLAGS.n_output, FLAGS.obj_type, FLAGS.nonlinear)\n\n\t\t# Add to the Graph that Ops that train the model\n\t\tupdate_w, update_thres = BCM_tf.training(input_placeholder, weights, thresholds, outputs, FLAGS.n_output, FLAGS.obj_type, FLAGS.nonlinear, FLAGS.eta, FLAGS.decay, FLAGS.tau, FLAGS.p)\n\n\t\t# Build the summary Tensor based on the TF collections fo Summaries\n\t\tsummary = tf.summary.merge_all()\n\n\t\t# Add the variable initializer Op.\n\t\tinit = tf.global_variables_initializer()\n\n\t\t# Create a saver for writing training checkpoints.\n\t\tsaver = tf.train.Saver()\n\n\t\t# Create a session for running Ops on the Graph\n\t\tsess = tf.Session()\n\n\t\t# Instantiate a Summary Writer to output summaries and the Graph\n\t\tsummary_writer = tf.summary.FileWriter(FLAGS.train_dir, sess.graph)\n\n\t\t# After everything is built:\n\n\t\t# Run the Op to initialize the variables.\n\t\tsess.run(init)\n\n\t\t# Start the training loop\n\t\tfor step in range(FLAGS.epochs):\n\t\t\ttf.random_shuffle(data_sets)\n\t\t\tfor sample in range(data_sets.shape[0]):\n\t\t\t\t# Fill a feed dictionary with the actual set of images and labels\n\t\t\t\t# For this particular training step\n\t\t\t\tfeed_dict = fill_feed_dict(data_sets[sample, :].reshape([1, 2]), data_sets, input_placeholder, obj_placeholder)\n\n\t\t\t\t# Run one step of the mode. The return values are the outputs\n\t\t\t\tsess.run(update_w, feed_dict=feed_dict)\n\t\t\t\tsess.run(update_thres, feed_dict=feed_dict)\n\n\t\t\t\t# Write summaries and print overview fairly often\n\t\t\t\t# if (step+1) * sample % 100 == 0:\n\t\t\t\t\t# Print status to stdout\n\t\t\t\t # print('Iteration %d:' % (weights[0, 0]))\n\t\t\t\t\t# Update the event file\n\t\t\t\t\t#summary_str = sess.run(summary, feed_dict=feed_dict)\n\t\t\t\t\t#summary_writer.add_summary(summary_str, step)\n\t\t\t\t\t#summary_writer.flush()\n\n\t\tfinal_w = sess.run(weights).reshape(1,4)\n\n\treturn final_w, data_w",
"def train_hyperopt(params):\n lasagne.random.set_rng(RandomState(9859295))\n\n template_name = params.pop('template_name') \n params = adjust_params_for_hyperopt(params)\n \n config_strings = create_config_strings(template_name)\n config_objects = create_config_objects(config_strings)\n templates, _ = create_templates_variants_from_config_objects(\n config_objects)\n \n \n processed_templates, params_without_template_params = process_templates(\n templates, params)\n final_params = process_parameters_by_templates(params_without_template_params, \n processed_templates)\n \n # go to directory above this source-file\n main_template_filename = os.path.dirname(os.path.abspath(os.path.dirname(\n __file__)))\n # then complete path to config\n main_template_filename = os.path.join(main_template_filename, \"configs\", \n \"eegnet_template.yaml\")\n \n with (open(main_template_filename, 'r')) as main_template_file:\n main_template_str = main_template_file.read()\n \n \n final_params['original_params'] = 'dummy'\n train_str = Template(main_template_str).substitute(final_params)\n \n def do_not_load_constructor(loader, node):\n return None\n yaml.add_constructor(u'!DoNotLoad', do_not_load_constructor)\n modified_train_str = train_str.replace('layers: ', 'layers: !DoNotLoad ')\n train_dict = yaml_parse.load(modified_train_str) \n dataset = train_dict['dataset'] \n dataset.load()\n dataset_provider = train_dict['dataset_provider']\n \n assert 'in_sensors' in train_str\n assert 'in_rows' in train_str\n assert 'in_cols' in train_str\n \n train_str = train_str.replace('in_sensors',\n str(dataset.get_topological_view().shape[1]))\n train_str = train_str.replace('in_rows',\n str(dataset.get_topological_view().shape[2]))\n train_str = train_str.replace('in_cols', \n str(dataset.get_topological_view().shape[3]))\n \n train_dict = yaml_parse.load(train_str)\n layers = train_dict['layers']\n final_layer = layers[-1]\n\n # turn off debug/info logging\n logging.getLogger(\"pylearn2\").setLevel(logging.WARN)\n logging.getLogger(\"braindecode\").setLevel(logging.WARN)\n exp = Experiment()\n exp.setup(final_layer, dataset_provider, **train_dict['exp_args'])\n exp.run()\n final_misclass = exp.monitor_chans['test_misclass'][-1]\n print(\"Result for\")\n pprint(params)\n print(\"Final Test misclass: {:5.4f}\".format(float(final_misclass)))\n return final_misclass",
"def __init__(self, model, data, batch_size=50, num_epochs=2, optim_type=\"adam\", optim_config={'learning_rate': 1e-2,}, lr_decay=1.0, num_train_samples=100, num_val_samples=None, verbose=True):\n self.model = model\n \n self.X_train = data[\"X_train\"]\n self.y_train = data[\"y_train\"]\n self.X_val = data[\"X_val\"]\n self.y_val = data[\"y_val\"]\n\n # Setting up variables for the hyperparameters\n \n self.optim_type = optim_type\n self.optim_config = optim_config # dict containing hyperparameters related to parameter update\n self.lr_decay = lr_decay # learning rate decay rate\n self.batch_size = batch_size\n self.num_epochs = num_epochs\n self.num_train_samples = num_train_samples\n self.num_val_samples = num_val_samples\n\n self.print_every = 20\n self.verbose = verbose\n \n # Setting up some extra variables for faster convergence / book-keeping\n \n self.epoch = 0 # to keep track of number of epochs done\n self.best_val_acc = 0 # to keep track of the best val accuracy across all epochs\n self.best_params = {} # to keep track of best model across all epochs\n self.latest_loss = 0 # to keep track of loss in latest iteration\n\n # Making a copy of the optim_config for each parameter\n # for using in other functions of the solver class\n # optim_cofig contains first and second moment of gradients, if applicable, wrt 1 param and hence each parameter has its own optim_config dict\n \n self.optim_configs = {} # dictionary containing config dicts of all params\n for p in self.model.params:\n d = {k: v for k, v in self.optim_config.items()} # copying the input config dict to config dicts of all params\n self.optim_configs[p] = d",
"def _train_and_evaluate(estimator, output_dir):\n \n \"\"\"X_train, y_train =utils._feature_label_split(df_train,\"is_churn\",\"msno\")\n df_val = utils.read_from_bigquery(\"amiable-octane-267022.kkbox.output_val_1\",\"amiable-octane-267022\")\n X_val, y_val =utils._feature_label_split(df_val,\"is_churn\",\"msno\")\"\"\"\n \n df_train=utils.over_sample(\"amiable-octane-267022.kkbox.output_train_1\",\"amiable-octane-267022\")\n X_train, y_train =utils._feature_label_split(df_train,\"is_churn\",\"msno\")\n df_val=utils.over_sample(\"amiable-octane-267022.kkbox.output_val_1\",\"amiable-octane-267022\")\n X_val, y_val =utils._feature_label_split(df_val,\"is_churn\",\"msno\")\n\n estimator.fit(X_train, y_train)\n f1_scorer = make_scorer(f1_score)\n accuracy_scorer =make_scorer(accuracy_score)\n\n if metadata.HYPERPARAMTER_TUNING:\n scores=model_selection.cross_val_score(estimator, X_val, y_val, cv=3,scoring=f1_scorer)\n #,scoring=f1_scorer\n\n logging.info('Score: %s', scores)\n\n #tune hyper\n hpt = hypertune.HyperTune()\n hpt.report_hyperparameter_tuning_metric(\n hyperparameter_metric_tag='F1_SCORE',\n metric_value=np.mean(scores),\n global_step=10000)\n \n#joblib.dump(estimator, 'model.joblib')\n\n # Write model and eval metrics to `output_dir`\n model_output_path = os.path.join(output_dir, 'model',metadata.MODEL_FILE_NAME)\n \n utils.dump_object(estimator, model_output_path)",
"def train(self, hyps):\n\n # Print Hyperparameters To Screen\n items = list(hyps.items())\n for k, v in sorted(items):\n print(k+\":\", v)\n\n # Make Save Files\n if \"save_folder\" in hyps:\n save_folder = hyps['save_folder']\n else:\n save_folder = \"./saved_data/\"\n\n if not os.path.exists(save_folder):\n os.mkdir(save_folder)\n base_name = save_folder + hyps['exp_name']\n net_save_file = base_name+\"_net.p\"\n best_net_file = base_name+\"_best.p\"\n optim_save_file = base_name+\"_optim.p\"\n log_file = base_name+\"_log.txt\"\n if hyps['resume']: log = open(log_file, 'a')\n else: log = open(log_file, 'w')\n for k, v in sorted(items):\n log.write(k+\":\"+str(v)+\"\\n\")\n\n # Miscellaneous Variable Prep\n logger = Logger()\n shared_len = hyps['n_tsteps']*hyps['n_rollouts']\n env = gym.make(hyps['env_type'])\n obs = env.reset()\n prepped = hyps['preprocess'](obs)\n hyps['state_shape'] = [hyps['n_frame_stack']] + [*prepped.shape[1:]]\n if hyps['env_type'] == \"Pong-v0\":\n action_size = 3\n else:\n action_size = env.action_space.n*(hyps['env_type']!=\"Pong-v0\")\n hyps['action_shift'] = (4-action_size)*(hyps['env_type']==\"Pong-v0\") \n print(\"Obs Shape:,\",obs.shape)\n print(\"Prep Shape:,\",prepped.shape)\n print(\"State Shape:,\",hyps['state_shape'])\n print(\"Num Samples Per Update:\", shared_len)\n print(\"Samples Wasted in Update:\", shared_len % hyps['batch_size'])\n del env\n\n # Make Network\n net = hyps['model'](hyps['state_shape'],action_size,h_size=hyps['h_size'],bnorm=hyps['use_bnorm'])\n if hyps['resume']:\n net.load_state_dict(torch.load(net_save_file))\n base_net = copy.deepcopy(net)\n net = cuda_if(net)\n net.share_memory()\n base_net = cuda_if(base_net)\n\n # Prepare Shared Variables\n shared_data = {'states': cuda_if(torch.zeros(shared_len, *hyps['state_shape']).share_memory_()),\n 'rewards': cuda_if(torch.zeros(shared_len).share_memory_()),\n 'deltas': cuda_if(torch.zeros(shared_len).share_memory_()),\n 'dones': cuda_if(torch.zeros(shared_len).share_memory_()),\n 'actions': torch.zeros(shared_len).long().share_memory_()}\n if net.is_recurrent:\n shared_data['h_states'] = cuda_if(torch.zeros(shared_len, hyps['h_size']).share_memory_())\n n_rollouts = hyps['n_rollouts']\n gate_q = mp.Queue(n_rollouts)\n stop_q = mp.Queue(n_rollouts)\n reward_q = mp.Queue(1)\n reward_q.put(-1)\n\n # Make Runners\n runners = []\n for i in range(hyps['n_envs']):\n runner = Runner(shared_data, hyps, gate_q, stop_q, reward_q)\n runners.append(runner)\n\n # Start Data Collection\n print(\"Making New Processes\")\n procs = []\n for i in range(len(runners)):\n proc = mp.Process(target=runners[i].run, args=(net,))\n procs.append(proc)\n proc.start()\n print(i, \"/\", len(runners), end='\\r')\n col_start_time = time.time()\n for i in range(n_rollouts):\n gate_q.put(i)\n\n # Make Updater\n updater = Updater(base_net, hyps)\n if hyps['resume']:\n updater.optim.load_state_dict(torch.load(optim_save_file))\n updater.optim.zero_grad()\n updater.net.train(mode=True)\n updater.net.req_grads(True)\n\n # Prepare Decay Precursors\n entr_coef_diff = hyps['entr_coef'] - hyps['entr_coef_low']\n epsilon_diff = hyps['epsilon'] - hyps['epsilon_low']\n lr_diff = hyps['lr'] - hyps['lr_low']\n\n # Training Loop\n past_rews = deque([0]*hyps['n_past_rews'])\n last_avg_rew = 0\n best_rew_diff = 0\n best_avg_rew = -1000\n epoch = 0\n T = 0\n while T < hyps['max_tsteps']:\n basetime = time.time()\n epoch += 1\n\n # Collect data\n for i in range(n_rollouts):\n stop_q.get()\n collection_time = time.time() - 
col_start_time\n\n T += shared_len\n\n # Reward Stats\n avg_reward = reward_q.get()\n reward_q.put(avg_reward)\n last_avg_rew = avg_reward\n if avg_reward > best_avg_rew:\n best_avg_rew = avg_reward\n updater.save_model(best_net_file, None)\n\n # Calculate the Loss and Update nets\n start_time = time.time()\n updater.update_model(shared_data)\n update_time = time.time() - start_time\n net.load_state_dict(updater.net.state_dict()) # update all collector nets\n \n # Resume Data Collection\n col_start_time = time.time()\n for i in range(n_rollouts):\n gate_q.put(i)\n\n # Decay HyperParameters\n if hyps['decay_eps']:\n updater.epsilon = (1-T/(hyps['max_tsteps']))*epsilon_diff + hyps['epsilon_low']\n print(\"New Eps:\", updater.epsilon)\n if hyps['decay_lr']:\n new_lr = (1-T/(hyps['max_tsteps']))*lr_diff + hyps['lr_low']\n updater.new_lr(new_lr)\n print(\"New lr:\", new_lr)\n if hyps['decay_entr']:\n updater.entr_coef = entr_coef_diff*(1-T/(hyps['max_tsteps']))+hyps['entr_coef_low']\n print(\"New Entr:\", updater.entr_coef)\n\n # Periodically save model\n if epoch % 10 == 0:\n updater.save_model(net_save_file, optim_save_file)\n\n # Print Epoch Data\n past_rews.popleft()\n past_rews.append(avg_reward)\n max_rew, min_rew = deque_maxmin(past_rews)\n updater.print_statistics()\n avg_action = shared_data['actions'].float().mean().item()\n print(\"Epoch\", epoch, \"– T =\", T)\n print(\"Grad Norm:\",float(updater.norm),\"– Avg Action:\",avg_action,\"– Best AvgRew:\",best_avg_rew)\n print(\"Avg Rew:\", avg_reward, \"– High:\", max_rew, \"– Low:\", min_rew, end='\\n')\n updater.log_statistics(log, T, avg_reward, avg_action, best_avg_rew)\n updater.info['AvgRew'] = avg_reward\n logger.append(updater.info, x_val=T)\n\n # Check for memory leaks\n gc.collect()\n max_mem_used = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss\n print(\"Time:\", time.time()-basetime, \"– Collection:\", collection_time, \"– Update:\", update_time)\n if 'hyp_search_count' in hyps and hyps['hyp_search_count'] > 0 and hyps['search_id'] != None:\n print(\"Search:\", hyps['search_id'], \"/\", hyps['hyp_search_count'])\n print(\"Memory Used: {:.2f} memory\\n\".format(max_mem_used / 1024))\n\n logger.make_plots(base_name)\n log.write(\"\\nBestRew:\"+str(best_avg_rew))\n log.close()\n # Close processes\n for p in procs:\n p.terminate()\n return best_avg_rew",
"def train(model, x_train, y_train, x_valid, y_valid, config):\n \n epochs = config['epochs']\n threshold = config['early_stop_epoch']\n alpha = config['learning_rate']\n# val_loss = 10000*np.ones((epochs,1))\n beta = config['momentum_gamma']\n batch_size = config['batch_size']\n \n N = x_train.shape[0]\n num_batches = int((N+batch_size -1 )/ batch_size)\n \n best_weight = []\n best_epoch = []\n best_bias = []\n #print(len(model.layers))\n train_loss_list = []\n \n train_acc_list = []\n val_acc_list = []\n val_loss_list = []\n \n counter = 0\n \n lam = 0.0001\n \n \n for i in range(1, epochs+1):\n shuffled_indices = np.random.permutation(range(N))\n \n for batch in range(num_batches):\n minibatch_indices = shuffled_indices[batch_size*batch:min(batch_size*(batch+1), N)]\n #print(len(minibatch_indices))\n xbatch = x_train[minibatch_indices, :]\n ybatch = y_train[minibatch_indices, :]\n #print(ybatch.shape)\n y, loss = model(xbatch, ybatch)\n \n model.backward() \n #weight update and storing\n for k in range(0, len(config['layer_specs']), 2):\n mom_w = -model.layers[k].d_v_w * beta + alpha*(model.layers[k].d_w + lam*model.layers[k].w )\n mom_b = -model.layers[k].d_v_b * beta + alpha*(model.layers[k].d_b + lam*model.layers[k].b )\n model.layers[k].w = model.layers[k].w - (mom_w )\n model.layers[k].b = model.layers[k].b - (mom_b )\n model.layers[k].d_v_w = -mom_w\n model.layers[k].d_v_b = -mom_b \n\n y, loss = model(x_train, y_train) \n train_loss_list.append(loss)\n \n train_pred = np.argmax(y, axis=1) \n acc = np.mean(np.argwhere(y_train==1)[:,1]==train_pred) \n \n train_acc_list.append(acc)\n \n \n #print(\"Training acc for epoch \", i, \" is:\\n\", acc) \n #print(\"Training loss for epoch \", i, \" is:\\n\", loss) \n val_y, val_loss = model(x_valid, y_valid)\n val_loss_list.append(val_loss)\n\n val_pred = np.argmax(val_y, axis=1) \n acc = np.mean(np.argwhere(y_valid==1)[:,1]==val_pred) \n val_acc_list.append(acc)\n \n #print(\"Validation acc for epoch \", i, \" is:\\n\", acc) \n #print(\"Validation loss for epoch \", i, \" is:\\n\", val_loss)\n if(i>1 and val_loss <min(val_loss_list[:-1])):\n #update best weights\n counter = 0\n weight = []\n bias = []\n for k in range(0, len(config['layer_specs']), 2):\n weight.append(model.layers[k].w)\n bias.append(model.layers[k].b)\n best_weight = weight \n best_bias = bias\n best_epoch = i\n else:\n counter +=1\n \n if counter > threshold:\n print(\"best epoch:\", best_epoch)\n break\n\n# if(i>=6 and val_loss[i-1]>=val_loss[i-2] and val_loss[i-2]>=val_loss[i-3]and val_loss[i-3]>=val_loss[i-4]and val_loss[i-4]>=val_loss[i-5]and val_loss[i-5]>=val_loss[i-6]):\n# break\n \n print(len(best_weight))\n print('Epoch: ', i)\n #print(val_loss)\n p = 0\n for k in range(0, len(config['layer_specs']), 2):\n model.layers[k].w = best_weight[p]\n model.layers[k].b = best_bias[p]\n p = p + 1\n \n return train_loss_list, val_loss_list, train_acc_list, val_acc_list\n raise NotImplementedError(\"Train method not implemented\")",
"def train(network_def, target_params, optimizer, states, actions, next_states, rewards,\n terminals, loss_weights, target_opt, num_tau_samples, num_tau_prime_samples,\n num_quantile_samples, cumulative_gamma, double_dqn, kappa, tau,alpha,clip_value_min, num_actions,rng):\n online_params = optimizer.target\n def loss_fn(params, rng_input, target_quantile_vals, loss_multipliers):\n def online(state):\n return network_def.apply(params, state, num_quantiles=num_tau_samples, rng=rng_input)\n\n model_output = jax.vmap(online)(states)\n quantile_values = model_output.quantile_values\n quantiles = model_output.quantiles\n chosen_action_quantile_values = jax.vmap(lambda x, y: x[:, y][:, None])(\n quantile_values, actions)\n # Shape of bellman_erors and huber_loss:\n # batch_size x num_tau_prime_samples x num_tau_samples x 1.\n bellman_errors = (target_quantile_vals[:, :, None, :] -\n chosen_action_quantile_values[:, None, :, :])\n # The huber loss (see Section 2.3 of the paper) is defined via two cases:\n # case_one: |bellman_errors| <= kappa\n # case_two: |bellman_errors| > kappa\n huber_loss_case_one = (\n (jnp.abs(bellman_errors) <= kappa).astype(jnp.float32) *\n 0.5 * bellman_errors ** 2)\n huber_loss_case_two = (\n (jnp.abs(bellman_errors) > kappa).astype(jnp.float32) *\n kappa * (jnp.abs(bellman_errors) - 0.5 * kappa))\n huber_loss = huber_loss_case_one + huber_loss_case_two\n # Tile by num_tau_prime_samples along a new dimension. Shape is now\n # batch_size x num_tau_prime_samples x num_tau_samples x 1.\n # These quantiles will be used for computation of the quantile huber loss\n # below (see section 2.3 of the paper).\n quantiles = jnp.tile(quantiles[:, None, :, :],\n [1, num_tau_prime_samples, 1, 1]).astype(jnp.float32)\n # Shape: batch_size x num_tau_prime_samples x num_tau_samples x 1.\n quantile_huber_loss = (jnp.abs(quantiles - jax.lax.stop_gradient(\n (bellman_errors < 0).astype(jnp.float32))) * huber_loss) / kappa\n # Sum over current quantile value (num_tau_samples) dimension,\n # average over target quantile value (num_tau_prime_samples) dimension.\n # Shape: batch_size x num_tau_prime_samples x 1.\n loss = jnp.sum(quantile_huber_loss, axis=2)\n loss = jnp.squeeze(jnp.mean(loss, axis=1), axis=-1)\n\n mean_loss = jnp.mean(loss_multipliers * loss)\n\n return mean_loss, loss\n\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n\n if target_opt == 0:\n rng, target_quantile_vals = target_quantile_values_fun(\n network_def,\n online_params,\n target_params,\n next_states,\n rewards,\n terminals,\n num_tau_prime_samples,\n num_quantile_samples,\n cumulative_gamma,\n double_dqn,\n rng)\n\n elif target_opt == 1:\n rng, target_quantile_vals = munchau_target_quantile_values_fun(\n network_def,\n online_params,\n target_params,\n states,\n actions,\n next_states,\n rewards,\n terminals,\n num_tau_prime_samples,\n num_quantile_samples,\n cumulative_gamma,\n double_dqn,\n rng,\n tau,\n alpha,\n clip_value_min,\n num_actions\n )\n\n else:\n print('error')\n\n rng, rng_input = jax.random.split(rng)\n (mean_loss, loss), grad = grad_fn(online_params, rng_input, target_quantile_vals, loss_weights)\n optimizer = optimizer.apply_gradient(grad)\n return rng, optimizer, loss, mean_loss",
"def train(args):\n # prepare environment\n brain_name = env.brain_names[0]\n brain = env.brains[brain_name]\n env_info = env.reset(train_mode=True)[brain_name]\n\n num_agents = len(env_info.agents)\n print('Number of agents:', num_agents)\n\n # size of each action\n action_size = brain.vector_action_space_size\n print('Size of each action:', action_size)\n\n # examine the state space\n states = env_info.vector_observations\n state_size = states.shape[1]\n print('There are {} agents. Each observes a state with length: {}'.format(\n states.shape[0], state_size))\n print('The state for the first agent looks like:', states[0])\n\n # Crate instance of MADDPG Class, mainly possible to control the model dimensions, learnrates and batch sizes\n agent = MADDPG(state_size,\n action_size,\n lr_actor=args.lr_actor,\n lr_critic=args.lr_critic,\n lr_decay=args.lr_decay,\n replay_buff_size=args.replay_buff_size,\n gamma=args.gamma,\n batch_size=args.batch_size,\n random_seed=args.random_seed,\n soft_update_tau=args.soft_update_tau,\n actor_layer_dim_1=args.actor_layer_dim_1,\n actor_layer_dim_2=args.actor_layer_dim_2,\n actor_layer_dim_3=args.actor_layer_dim_3,\n critic_layer_dim_1=args.critic_layer_dim_1,\n critic_layer_dim_2=args.critic_layer_dim_2,\n critic_layer_dim_3=args.critic_layer_dim_3\n\n )\n\n total_rewards = []\n avg_scores = []\n max_avg_score = -1\n max_score = -1\n threshold_init = 20\n noise_t = args.epsilon\n noise_decay = args.epsilon_decay\n latest_avg_score = -1\n # for early-stopping training if consistently worsen for # episodes\n worsen_tolerance = threshold_init\n for i_episode in range(1, 1+args.num_episodes):\n\n env_inst = env.reset(train_mode=True)[\n brain_name] # reset the environment\n states = env_inst.vector_observations # get the current state\n # initialize score array\n scores = np.zeros(num_agents)\n dones = [False]*num_agents\n while not np.any(dones):\n # select an action\n actions = agent.act(states, noise_t)\n # send the action to the environment\n env_inst = env.step(actions)[brain_name]\n next_states = env_inst.vector_observations # get the next state\n rewards = env_inst.rewards # get the reward\n dones = env_inst.local_done # see if episode has finished\n agent.update(states, actions, rewards, next_states, dones)\n\n noise_t *= noise_decay\n scores += rewards # update scores\n states = next_states\n\n episode_score = np.max(scores)\n total_rewards.append(episode_score)\n print(\"\\rEpisodic {} Score: {:.4f}\\t Avg Score: {:.4f}\".format(\n i_episode, episode_score, latest_avg_score), end=' ')\n\n if max_score <= episode_score:\n max_score = episode_score\n # save best model so far\n agent.save(\n \"chkpts/{}/{:02d}_best_model.checkpoint\".format(args.model_path, args.loop_counter))\n\n # record avg score for the latest 100 steps\n if len(total_rewards) >= args.test_n_run:\n latest_avg_score = sum(\n total_rewards[(len(total_rewards)-args.test_n_run):]) / args.test_n_run\n avg_scores.append(latest_avg_score)\n\n if max_avg_score <= latest_avg_score: # record better results\n worsen_tolerance = threshold_init # re-count tolerance\n max_avg_score = latest_avg_score\n else:\n if max_avg_score > 0.5:\n worsen_tolerance -= 1 # count worsening counts\n print(\"Loaded from last best model.\")\n # continue from last best-model\n agent.load(\n \"chkpts/{}/{:02d}_best_model.checkpoint\".format(args.model_path, args.loop_counter))\n if worsen_tolerance <= 0: # earliy stop training\n print(\"Early Stop Training.\")\n break\n del agent\n return total_rewards",
"def main(train, val, test):\n\n wandb.init(project=\"arg-qual\", tags=[args.node_feat, args.quality_dim, str(args.epochs), str(args.lr)])\n\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n dataloader_train = DataLoader(\n trainset,\n batch_size=10,\n collate_fn=collate,\n drop_last=False,\n shuffle=True)\n\n dataloader_val = DataLoader(\n val,\n batch_size=10,\n collate_fn=collate,\n drop_last=False,\n shuffle=True)\n\n in_dim = trainset[0][0].ndata['x'].shape[1] # define feature dim\n out_dim = 2\n model = GraphGATClassifier(in_dim, 16, out_dim)\n loss_func = nn.CrossEntropyLoss()\n opt = torch.optim.Adam(model.parameters(), lr=args.lr)\n\n set_seed(args)\n\n val_no_improv = 0\n min_val_acc = 0\n\n epoch_losses = []\n for epoch in range(args.epochs):\n model.train()\n epoch_loss = 0\n epoch_acc = 0\n for iter, (batched_graphs, labels) in enumerate(dataloader_train):\n logits = model(batched_graphs)\n loss = loss_func(logits, labels)\n opt.zero_grad()\n loss.backward()\n opt.step()\n epoch_loss += loss.detach().item()\n acc = compute_acc(model, batched_graphs, labels)\n epoch_acc += acc\n\n epoch_loss /= (iter + 1)\n epoch_acc /= (iter + 1)\n epoch_losses.append(epoch_loss)\n\n print(\"Epoch {} | Loss {:.4f} | Accuracy {:.4f}\".format(epoch + 1, epoch_loss, epoch_acc))\n wandb.log({'loss': epoch_loss})\n\n # Evaluate during training\n val_acc = 0\n for iter, (batched_graphs, labels) in enumerate(dataloader_val):\n acc = compute_acc(model, batched_graphs, labels)\n val_acc += acc\n val_acc /= (iter + 1)\n print(\"Validation accuracy {:.2%} at epoch {}\".format(val_acc, epoch))\n wandb.log({'val_acc': val_acc})\n '''\n if epoch > 100:\n if val_acc == min_val_acc:\n val_no_improv += 1\n else:\n min_val_acc = val_acc\n val_no_improv = 0\n if val_no_improv == 10:\n print(\"Early stopping!\")\n break\n '''\n\n # Evaluation\n dataloader_test = DataLoader(\n test,\n batch_size=10,\n collate_fn=collate,\n drop_last=False,\n shuffle=True)\n\n test_acc = 0\n for iter, (batched_graphs, labels) in enumerate(dataloader_test):\n acc = compute_acc(model, batched_graphs, labels)\n test_acc += acc\n test_acc /= (iter + 1)\n print(\"Test accuracy {:.2%}\".format(test_acc))\n wandb.log({'test_acc': test_acc})",
"def train_all(X_train_fuse, Y_train, X_dev_fuse, Y_dev, R_train, R_dev, hyperparams):",
"def gradb_train(X_train, y_train, write=False):\n model_gradb = GradientBoostingClassifier(loss='deviance',\n learning_rate=0.2,\n n_estimators=100,\n subsample=0.9,\n #min_samples_leaf=10,\n max_depth=6,\n random_state=321, verbose=0)\n model_gradb.fit(X_train, y_train)\n if write:\n pickle.dump(model_gradb, open(obj_save_path+'model_gradb.p', 'wb'))\n #model_gradb = pickle.load(open(obj_save_path+'model_gradb.p', 'rb'))\n return model_gradb",
"def trainNet():",
"def train(self, X, y, batch_size=5, num_epochs=10, alpha=0.1, gamma=0.9, learning=\"Delta\"):\n rem = int(np.ceil(len(X[0])/batch_size))\n for epoch in range(num_epochs):\n art = 0;\n for sample in range(rem):\n end = art + batch_size\n\n # Get a sample (column from X and Y) where the size of the sample is given by the batch size\n sampleX = X[:, art : end]\n sampleY = y[:, art : end]\n #print (sampleX)\n\n # Get the prediction\n results = self.predict(sampleX)\n art += batch_size\n\n if learning == \"Delta\" or learning == \"delta\":\n # Calculate e\n e = np.subtract(sampleY, results)\n\n # Calculate e dot p, where p is the input matrix\n ep = np.dot(e, np.transpose(sampleX))\n\n # Multiply this new matrix by the scalar alpha\n aep = np.multiply(alpha, ep)\n\n # Calculate the new weights along with the bias\n self.weights = np.add(self.weights, aep)\n \n elif learning == \"Filtered\" or learning == \"filtered\":\n\n # Calculate e dot p, where p is the input matrix\n ep = np.dot(sampleY, np.transpose(sampleX))\n\n # Multiply this new matrix by the scalar alpha\n aep = np.multiply(alpha, ep)\n\n # Multiply the old weights by some scalar gamma\n gw = np.multiply(1 - gamma, self.weights)\n\n self.weights = np.add(gw, aep)\n\n elif learning == \"Unsupervised_hebb\" or learning == \"unsupervised_hebb\":\n # Add a row of one's to the top of the input matrix\n #newX = np.vstack((np.array([1 for column in range(sampleX.shape[1])]), sampleX))\n\n # Calculate e dot p, where p is the input matrix\n ep = np.dot(results, np.transpose(sampleX))\n\n # Multiply this new matrix by the scalar alpha\n aep = np.multiply(alpha, ep)\n\n # Calculate the new weights along with the bias\n self.weights = np.add(self.weights, aep)",
"def train( self, trainingData, trainingLabels, validationData, validationLabels ):\n\n self.features = trainingData[0].keys() # could be useful later\n # DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR\n # THE AUTOGRADER WILL LIKELY DEDUCT POINTS.\n for iteration in range(self.max_iterations):\n #pdb.set_trace() # esto es un break point para que puedas comprobar el formato de los datos\n print (\"Starting iteration \", iteration, \"...\")\n for i in range(len(trainingData)):#training data\n max = -10000000\n for j in range(len(self.weights)):\n prod = np.dot(self.weights[j], trainingData[i]) #este sería x0 (en la primera vuelta) (xj)\n if (prod > max):\n max=prod #en max guardamos la distancia a la instancia que más cerca está de la que estamos recorriendo\n indclase=j #guardas el índice de la clase a la que predices que pertenece\n\n if(indclase != trainingLabels[i]):\n # recalcular pesos\n self.weights[trainingLabels[i]].__radd__(trainingData[i]) #honek jarraian egiten du gehiketa pisu guztientzat\n #pdb.set_trace() # esto es un break point para que puedas comprobar el formato de los datos\n self.weights[indclase].__sub__(trainingData[i]) #honek jarraian egiten du kenketa pisu guztientzat\n\n\n\n\n\n ########################################################################################\n # 1. i es el indice de un ejemplo (un item, f(x) de un ejemplo) del conjunto de entrenamiento.\n # 2. Asi pues, en cada vuelta de este loop se trata un solo ejemplo\n # por cada ejemplo calculareis el producto punto (dotProduct) w*item\n # NOTAS: Recordad que cada ejemplo viene representado por varios rasgos (o features), es decir, es un vector de rasgos, tantos como nos marca el atributo self.features.\n # Asi cada ejemplo es de dimension 1 filas y self.features).\n # La dimension del vector w tambien es self.features, es decir, habra tantos pesos en w_rasgo dentro de w como rasgos haya en cada item de ejemplo\n # Recordad tambien que es una clasificacion multiclase en este caso. Hay tantas clases como nos marca el atributo self.legalLabels\n #########################################################################################",
"def train_model(self, model, hyperparameter_dict, feature_col):\n if model == 'random_forest':\n clf = RandomForestClassifier(max_depth=hyperparameter_dict['depth'], n_estimators = hyperparameter_dict['tree_num'], random_state = 2021)\n elif model == 'XGBoost':\n clf = XGBClassifier(objective='binary:logistic', random_state=2021, max_depth = hyperparameter_dict['depth'], n_estimators = hyperparameter_dict['tree_num'])\n elif model == 'gbt':\n clf = GradientBoostingClassifier(n_estimators = hyperparameter_dict['tree_num'], max_depth = hyperparameter_dict['depth'], random_state = 2021)\n else:\n print(f'please enter model among [\"random_forest\", \"XGBoost\", \"gbt\"]')\n # return\n X_train = self.get_train_X()[feature_col]\n y_train = self.get_train_y()\n X_val = self.get_val_X()[feature_col]\n y_val = self.get_val_y()\n X_test = self.get_test_X()[feature_col]\n y_test = self.get_test_y()\n clf.fit(X_train, y_train)\n now_depth = hyperparameter_dict['depth']\n now_tree_num = hyperparameter_dict['tree_num']\n print(f'depth is : {now_depth}, tree_num : {now_tree_num}')\n\n train_result = clf.predict_proba(X_train)\n train_result = train_result[:,1]\n fpr, tpr, thresholds = metrics.roc_curve(y_train, train_result)\n print(f'train auc : {metrics.auc(fpr, tpr)}')\n\n val_result = clf.predict_proba(X_val)\n val_result = val_result[:,1]\n fpr, tpr, thresholds = metrics.roc_curve(y_val, val_result)\n print(f'validation auc : {metrics.auc(fpr, tpr)}')\n\n test_result = clf.predict_proba(X_test)\n test_result = test_result[:,1]\n fpr, tpr, thresholds = metrics.roc_curve(y_test, test_result)\n print(f'Test auc : {metrics.auc(fpr, tpr)}')\n \"\"\"\n plot aoc curve and lift chart\n \"\"\"\n self.plot_roc_graph(clf, feature_col)\n self.set_model(clf)\n score_list = pd.Series(test_result, name='score').to_frame().reset_index(drop=True)\n test_key = self.get_test()[['idd', 'ft_data_dt']].reset_index(drop=True)\n test = pd.concat([test_key, score_list], axis = 1)\n self.set_final_score(test)\n \n self.plot_lift_chart(test_result, y_test.to_numpy(), 20, 1)\n print(f'bin of score from infected patients')\n self.plot_lift_chart(test_result, y_test.to_numpy(), 20, 0)\n print(f'bin of score from non-infected patients')\n print('')\n # save model\n filename = model + '.sav'\n print(f'save model to {filename}')\n pickle.dump(clf, open(filename, 'wb'))\n return clf, filename",
"def train(self, training_steps=10):",
"def train():\n pass",
"def main():\n data = load_data()\n analyze_features(data['full_features'])\n model = train(data)\n\n with open('model.pickle', 'wb') as f:\n pickle.dump(model, f)\n evaluate(model, data)",
"def __init__(self, model, model_name, preds, confidence, gt_coords):\r\n\r\n if GuidedBackprop.GuidedReluRegistered is False:\r\n @tf.RegisterGradient(\"GuidedRelu\")\r\n def _GuidedReluGrad(op, grad):\r\n gate_g = tf.cast(grad > 0, \"float32\")\r\n gate_y = tf.cast(op.outputs[0] > 0, \"float32\")\r\n return gate_y * gate_g * grad\r\n GuidedBackprop.GuidedReluRegistered = True\r\n \r\n \"\"\" \r\n Create a dummy session to set the learning phase to 0 (test mode in keras) without \r\n inteferring with the session in the original keras model. This is a workaround\r\n for the problem that tf.gradients returns error with keras models that contains \r\n Dropout or BatchNormalization.\r\n\r\n Basic Idea: save keras model => create new keras model with learning phase set to 0 => save\r\n the tensorflow graph => create new tensorflow graph with ReLU replaced by GuiededReLU.\r\n \"\"\" \r\n # Set to test phase\r\n K.set_learning_phase(0) \r\n \r\n # Load training model\r\n if 'train' in model_name:\r\n print('Loading model ...')\r\n model = load_model('./tmp/gb_keras_train.h5')\r\n\r\n session = K.get_session()\r\n tf.compat.v1.train.export_meta_graph()\r\n \r\n saver = tf.compat.v1.train.Saver()\r\n saver.save(session, './tmp/guided_backprop_ckpt')\r\n\r\n self.guided_graph = tf.Graph()\r\n with self.guided_graph.as_default():\r\n self.guided_sess = tf.Session(graph = self.guided_graph)\r\n\r\n with self.guided_graph.gradient_override_map({'LeakyRelu': 'GuidedRelu'}): # replace LeakyRelu with GuidedRelu\r\n saver = tf.compat.v1.train.import_meta_graph('./tmp/guided_backprop_ckpt.meta')\r\n saver.restore(self.guided_sess, './tmp/guided_backprop_ckpt')\r\n\r\n output_list = []\r\n\r\n if 'train' in model_name: \r\n batch_idx = 0 # which image in the batch (assume batch size =1)\r\n anchor_box_idx = 2 # [20,20]\r\n prob_obj_idx = 4 # index for probability of a detection\r\n\r\n grid_hs, grid_ws = grid_coords(gt_coords)\r\n gt_grids = list(zip(grid_hs, grid_ws))\r\n\r\n train_output = self.guided_graph.get_tensor_by_name(model.output.name) # 64,64,3,6\r\n \r\n for grid in gt_grids:\r\n h = grid[0]\r\n w = grid[1]\r\n out_tensor = self.guided_graph.get_tensor_by_name(model.output.name)[batch_idx, h, w, anchor_box_idx, prob_obj_idx]\r\n output_list.append(out_tensor)\r\n \r\n elif 'infer' in model_name:\r\n preds = preds.tolist()\r\n for idx, p in enumerate(preds):\r\n p = list(p)\r\n if p[5] > confidence:\r\n out_tensor = self.guided_graph.get_tensor_by_name(model.output.name)[0,idx,5]\r\n output_list.append(out_tensor)\r\n \r\n self.imported_y = output_list\r\n self.imported_x = self.guided_graph.get_tensor_by_name(model.input.name)\r\n self.guided_grads_node = tf.gradients(self.imported_y, self.imported_x) # calculate gradient of class score with respect to input\r",
"def train_and_evaluate(OUTPUT_DIR,do_train = True,do_eval=True):\n\n\t\n\tBATCH_SIZE = 32\n\tLEARNING_RATE = 2e-5\n\tNUM_TRAIN_EPOCHS = 5.0\n\n\t#in this steps lr will be low and training will be slow\n\tWARMUP_PROPORTION = 0.1\n\n\n\n\tif os.path.exists(OUTPUT_DIR) and os.listdir(OUTPUT_DIR) and do_train:\n\t\traise ValueError(\"Output directory ({}) already exists and is not empty.\".format(OUTPUT_DIR))\n\tif not os.path.exists(OUTPUT_DIR):\n\t\tos.makedirs(OUTPUT_DIR)\n\t\t\n\t#create train and test data\n\n\ttrain_sents,train_labels,test_sents,test_labels = create_train_test(\"ADE/DRUG-AE.rel\",\"ADE/negative_data_AE.rel\")\n\n\tdevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n\ttokenizer = BertTokenizer.from_pretrained(\"bert-base-uncased\", do_lower_case=True)\n\n\tif do_train:\n\n\t\ttrain_examples = [InputExample(guid=None,text_a=sentence,text_b=None,label=label) for sentence,label in zip(train_sents, train_labels)]\n\t\tnum_train_examples = len(train_examples)\n\n\t\tnum_train_steps = int(math.ceil(num_train_examples / BATCH_SIZE * NUM_TRAIN_EPOCHS))\n\t\tnum_warmup_steps = int(num_train_steps * WARMUP_PROPORTION)\n\n\t\tmodel = BertForSequenceClassification.from_pretrained(\"bert-base-uncased\",num_labels = num_labels)\n\t\tmodel.to(device)\n\n\t\tparam_optimizer = list(model.named_parameters())\n\t\tno_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']\n\t\toptimizer_grouped_parameters = [\n\t\t\t{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},\n\t\t\t{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}\n\t\t\t]\n\n\t\toptimizer = BertAdam(optimizer_grouped_parameters,lr=LEARNING_RATE,warmup=WARMUP_PROPORTION,t_total=num_train_steps)\n\n\t\tglobal_step = 0\n\t\tnb_tr_steps = 0\n\t\ttr_loss = 0\n\n\t\ttrain_features = convert_examples_to_features(\n\t\t\ttrain_examples, label_list, MAX_SEQ_LENGTH, tokenizer)\n\n\n\t\tlogger.info(\"***** Running training *****\")\n\t\tlogger.info(\" Num examples = %d\", num_train_examples)\n\t\tlogger.info(\" Batch size = %d\", BATCH_SIZE)\n\t\tlogger.info(\" Num steps = %d\", num_train_steps)\n\n\n\t\tall_input_ids = torch.tensor([f.input_ids for f in train_features], dtype=torch.long)\n\t\tall_input_mask = torch.tensor([f.input_mask for f in train_features], dtype=torch.long)\n\t\tall_segment_ids = torch.tensor([f.segment_ids for f in train_features], dtype=torch.long)\n\t\tall_label_ids = torch.tensor([f.label_id for f in train_features], dtype=torch.long)\n\n\t\ttrain_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids)\n\t\ttrain_sampler = RandomSampler(train_data)\n\n\t\ttrain_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=BATCH_SIZE)\n\n\t\tmodel.train()\n\t\t# for name, param in model.named_parameters():\n\t\t# if param.requires_grad:\n\t\t# print(name)\n\t\t# return\n\t\tfor _ in trange(int(NUM_TRAIN_EPOCHS), desc=\"Epoch\"):\n\t\t\ttr_loss = 0\n\t\t\tnb_tr_examples, nb_tr_steps = 0, 0\n\t\t\tfor step, batch in enumerate(tqdm(train_dataloader, desc=\"Iteration\")):\n\t\t\t\tbatch = tuple(t.to(device) for t in batch)\n\t\t\t\tinput_ids, input_mask, segment_ids, label_id = batch\n\t\t\t\tloss = model(input_ids, segment_ids, input_mask, label_id)\n\t\t\t\tloss.backward()\n\n\t\t\t\ttr_loss += loss.item()\n\t\t\t\tnb_tr_examples += input_ids.size(0)\n\t\t\t\tnb_tr_steps += 1\n\t\t\t\toptimizer.step()\n\t\t\t\toptimizer.zero_grad()\n\t\t\t\tglobal_step += 
1\n\t\t\tprint(tr_loss)\n\n\t\t# Save a trained model and the associated configuration\n\t\tmodel_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self\n\t\toutput_model_file = os.path.join(OUTPUT_DIR, WEIGHTS_NAME)\n\t\ttorch.save(model_to_save.state_dict(), output_model_file)\n\t\toutput_config_file = os.path.join(OUTPUT_DIR, CONFIG_NAME)\n\t\twith open(output_config_file, 'w') as f:\n\t\t\tf.write(model_to_save.config.to_json_string())\n\t\tlabel_map = {i : label for i, label in enumerate(label_list,1)} \n\t\tmodel_config = {\"bert_model\":\"bert-base-uncased\",\"do_lower\":True,\"max_seq_length\":MAX_SEQ_LENGTH,\"num_labels\":num_labels,\"label_map\":label_map}\n\t\tjson.dump(model_config,open(os.path.join(OUTPUT_DIR,\"model_config.json\"),\"w\"))\n\n\telse:\n\t\toutput_config_file = os.path.join(OUTPUT_DIR, CONFIG_NAME)\n\t\toutput_model_file = os.path.join(OUTPUT_DIR, WEIGHTS_NAME)\n\t\tconfig = BertConfig(output_config_file)\n\t\tmodel = BertForSequenceClassification(config, num_labels=num_labels)\n\t\tmodel.load_state_dict(torch.load(output_model_file))\n\n\tmodel.to(device)\n\n\tif do_eval:\n\n\t\tEVAL_BATCH_SIZE = 32\n\n\t\teval_examples = [InputExample(guid=None,text_a=sentence,text_b=None,label=label) for sentence,label in zip(test_sents, test_labels)]\n\t\tnum_eval_examples = len(eval_examples)\n\n\t\teval_features = convert_examples_to_features(\n\t\t\teval_examples, label_list, MAX_SEQ_LENGTH, tokenizer)\n\n\t\tlogger.info(\"***** Running evaluation *****\")\n\t\tlogger.info(\" Num examples = %d\", num_eval_examples)\n\t\tlogger.info(\" Batch size = %d\", EVAL_BATCH_SIZE)\n\t\tall_input_ids = torch.tensor([f.input_ids for f in eval_features], dtype=torch.long)\n\t\tall_input_mask = torch.tensor([f.input_mask for f in eval_features], dtype=torch.long)\n\t\tall_segment_ids = torch.tensor([f.segment_ids for f in eval_features], dtype=torch.long)\n\t\tall_label_ids = torch.tensor([f.label_id for f in eval_features], dtype=torch.long)\n\t\teval_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids) \n\t\t# # Run prediction for full data\n\t\teval_sampler = SequentialSampler(eval_data)\n\t\teval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=EVAL_BATCH_SIZE)\n\t\tmodel.eval()\n\n\t\teval_loss, eval_accuracy = 0, 0\n\t\tnb_eval_steps, nb_eval_examples = 0, 0\n\t\ty_true = []\n\t\ty_pred = []\n\t\tlabel_map = {i : label for i, label in enumerate(label_list,1)}\n\t\tfor input_ids, input_mask, segment_ids, label_ids in tqdm(eval_dataloader, desc=\"Evaluating\"):\n\t\t\tinput_ids = input_ids.to(device)\n\t\t\tinput_mask = input_mask.to(device)\n\t\t\tsegment_ids = segment_ids.to(device)\n\t\t\tlabel_ids = label_ids.to(device)\n\n\t\t\twith torch.no_grad():\n\t\t\t\tlogits = model(input_ids, segment_ids, input_mask)\n\t\t\t\t\n\t\t\tlogits = torch.argmax(F.log_softmax(logits,dim=1),dim=1)\n\t\t\tlogits = logits.detach().cpu().numpy()\n\t\t\tlabel_ids = label_ids.to('cpu').numpy()\n\t\t\ty_pred.extend(logits)\n\t\t\ty_true.extend(label_ids)\n\t\tprint(len(y_pred))\n\t\tprint(len(y_true))\n\t\treport = classification_report(y_true, y_pred)\n\t\toutput_eval_file = os.path.join(OUTPUT_DIR, \"eval_results.txt\")\n\t\twith open(output_eval_file, \"w\") as writer:\n\t\t\tlogger.info(\"***** Eval results *****\")\n\t\t\tlogger.info(\"\\n%s\", report)\n\t\t\twriter.write(report)",
"def train_step(\n model,\n rng,\n state,\n batch,\n alpha_fn_dict,\n learning_rate_fn,\n weight_decay,\n metric_collector,\n):\n logging.info(\"train_step(batch=%s)\", batch)\n\n step = state.step + 1\n lr = learning_rate_fn(step)\n alpha_dict = jax.tree_map(lambda fn: fn(step), alpha_fn_dict)\n\n def loss_fn(params):\n variables = {\"params\": params}\n out = model.apply(variables, batch)\n\n # ------------------------------------------------------------------------\n # Compute the loss.\n pred_loss, stat_dict = out.compute_total_loss(batch, alpha_dict)\n\n # ------------------------------------------------------------------------\n # Weight Regularization\n weight_penalty_params = jax.tree_util.tree_leaves(variables[\"params\"])\n weight_l2 = sum(\n [jnp.sum(x**2) for x in weight_penalty_params if x.ndim > 1]\n )\n weight_penalty = weight_decay * 0.5 * weight_l2\n # ------------------------------------------------------------------------\n\n total_loss = pred_loss + weight_penalty\n stat_dict[\"weight_l2\"] = weight_l2\n\n return total_loss, stat_dict\n\n # ------------------------------------------------------------------------\n # Compute graidents\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n (loss, stat_dict), grad = grad_fn(state.params)\n\n # Compute average gradient across multiple workers.\n grad = jax.lax.pmean(grad, axis_name=\"batch\")\n\n # ------------------------------------------------------------------------\n # Update States\n new_state = state.apply_gradients(grads=grad)\n\n metrics_update = metric_collector.gather_from_model_output(\n total_loss=loss,\n learning_rate=lr,\n **stat_dict,\n )\n return new_state, metrics_update, rng",
"def evaluate(self, train_set, test_set, shuffle_batch=True,\n epochs=25, lr_decay=0.95, sqr_norm_lim=9,labels=None,model=None): \n cost = self.negative_log_likelihood(self.y) \n dropout_cost = self.dropout_negative_log_likelihood(self.y)\n # adadelta upgrades: dict of variable:delta\n grad_updates = self.sgd_updates_adadelta(dropout_cost, lr_decay, 1e-6, sqr_norm_lim)\n # shuffle dataset and assign to mini batches.\n # if dataset size is not a multiple of batch size, replicate \n # extra data (at random)\n np.random.seed(3435)\n batch_size = self.batch_size\n if train_set.shape[0] % batch_size > 0:\n extra_data_num = batch_size - train_set.shape[0] % batch_size\n #extra_data = train_set[np.random.choice(train_set.shape[0], extra_data_num)]\n perm_set = np.random.permutation(train_set) \n extra_data = perm_set[:extra_data_num]\n new_data = np.append(train_set, extra_data, axis=0)\n else:\n new_data = train_set\n \n shuffled_data = np.random.permutation(new_data) # Attardi\n n_batches = shuffled_data.shape[0]/batch_size\n # divide train set into 90% train, 10% validation sets\n n_train_batches = int(np.round(n_batches*0.8))\n n_val_batches = n_batches - n_train_batches\n train_set = shuffled_data[:n_train_batches*batch_size,:]\n val_set = shuffled_data[n_train_batches*batch_size:,:] \n # push data to gpu \n # the dataset has the format [word_indices,padding,user,label]\n train_set_x, train_set_y = shared_dataset(train_set[:,:-2], train_set[:,-1]) \n train_set_u = theano.shared(np.asarray(train_set[:,-2],dtype='int32')) \n # val_set_x = val_set[:,:-2]\n # val_set_u = val_set[:,-2]\n # val_set_y = val_set[:,-1]\n val_set_x, val_set_y = shared_dataset(val_set[:,:-2], val_set[:,-1])\n val_set_u = theano.shared(np.asarray(val_set[:,-2],dtype='int32')) \n test_set_x = test_set[:,:-2]\n test_set_u = test_set[:,-2]\n test_set_y = test_set[:,-1] \n batch_start = self.index * batch_size\n batch_end = batch_start + batch_size\n\n # compile Theano functions to get train/val/test errors\n \n \n test_y_pred = self.predict(test_set_x,test_set_u)\n test_error = T.mean(T.neq(test_y_pred, self.y))\n # errors on train set\n if self.Users is not None:\n train_model = theano.function([self.index], cost, updates=grad_updates,\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end],\n self.u: train_set_u[batch_start:batch_end]\n },\n allow_input_downcast = True)\n\n train_error = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end],\n self.u: train_set_u[batch_start:batch_end]},\n allow_input_downcast=True)\n val_model = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: val_set_x[batch_start:batch_end],\n self.y: val_set_y[batch_start:batch_end], \n self.u: val_set_u[batch_start:batch_end]},\n allow_input_downcast=True)\n test_model = theano.function([self.x, self.u, self.y], test_error, allow_input_downcast=True)\n else:\n train_model = theano.function([self.index], cost, updates=grad_updates,\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end]},\n allow_input_downcast = True)\n\n train_error = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end]},\n allow_input_downcast=True)\n\n val_model = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: val_set_x[batch_start:batch_end],\n 
self.y: val_set_y[batch_start:batch_end]},\n allow_input_downcast=True)\n test_model = theano.function([self.x, self.y], test_error, allow_input_downcast=True)\n\n # start training over mini-batches\n print 'training...' \n best_val_perf = 0\n test_perf = 0 \n patience = 5\n drops = 0\n prev_val_perf = 0 \n for epoch in xrange(epochs):\n start_time = time.time()\n # FIXME: should permute whole set rather than minibatch indexes\n if shuffle_batch:\n for minibatch_index in np.random.permutation(range(n_train_batches)):\n cost_epoch = train_model(minibatch_index)\n self.set_zero(self.zero_vec) # CHECKME: Why?\n else:\n for minibatch_index in xrange(n_train_batches):\n cost_epoch = train_model(minibatch_index) \n self.set_zero(self.zero_vec)\n train_losses = [train_error(i) for i in xrange(n_train_batches)]\n train_perf = 1 - np.mean(train_losses)\n val_losses = [val_model(i) for i in xrange(n_val_batches)]\n val_perf = 1 - np.mean(val_losses) \n info = 'epoch: %i\\%i (%.2f secs) train acc: %.2f %% | val acc: %.2f %%' % (\n epoch,epochs, time.time()-start_time, train_perf * 100., val_perf*100.) \n # from ipdb import set_trace; set_trace()\n if val_perf > prev_val_perf: \n drops=0\n if val_perf >= best_val_perf:\n best_val_perf = val_perf\n info+= \" **\"\n if model:\n # print \"save model\"\n self.save(model)\n if self.Users is not None:\n test_loss = test_model(test_set_x, test_set_u, test_set_y)\n else:\n test_loss = test_model(test_set_x, test_set_y)\n test_perf = 1 - test_loss \n else: \n drops+=1\n if drops >= patience:\n print \"Ran out of patience...\"\n break\n prev_val_perf = val_perf\n print info\n # set_trace() \n return test_perf",
"def train_and_score_bagging(network):\n\n train_predictions = pd.read_pickle('data/train_predictions.pkl.gz', compression='gzip')\n test_predictions = pd.read_pickle('data/test_predictions.pkl.gz', compression='gzip')\n\n train_actuals = pd.read_pickle('data/train_actuals.pkl.gz', compression='gzip')\n test_actuals = pd.read_pickle('data/test_actuals.pkl.gz', compression='gzip')\n\n\n train_x = np.array(train_predictions.values)\n train_y = train_actuals[0].values\n train_log_y = safe_log(train_y)\n test_x = np.array(test_predictions.values)\n test_y = test_actuals[0].values\n test_log_y = safe_log(test_y)\n\n model = compile_model(network)\n\n print('\\rNetwork')\n\n for property in network:\n print(property, ':', network[property])\n logging.info('%s: %s' % (property, network[property]))\n\n test = xgb.DMatrix(test_x)\n train = xgb.DMatrix(train_x, label=train_log_y)\n\n\n\n eval_set = [(test_x, test_log_y)]\n model.fit(train_x, train_log_y, early_stopping_rounds=20, eval_metric='mae', eval_set=eval_set,\n verbose=False)\n\n # eval_set = [(test, test_log_y)]\n # xgb.train(network, train, num_boost_round=5000, evals=eval_set, early_stopping_rounds=5)\n\n predictions = model.predict(test_x)\n # predictions = xgb.predict(test_x)\n inverse_predictions = safe_exp(predictions)\n score = mean_absolute_error(test_y, inverse_predictions)\n mape = safe_mape(test_y, inverse_predictions)\n\n print('\\rResults')\n\n best_round = xgb.best_iteration\n\n if np.isnan(score):\n score = 9999\n\n print('best round:', best_round)\n print('loss:', score)\n print('mape:', mape)\n print('-' * 20)\n\n logging.info('best round: %d' % best_round)\n logging.info('loss: %.4f' % score)\n logging.info('mape: %.4f' % mape)\n logging.info('-' * 20)\n\n eval_results({'xgb_predictions': {\n 'actual_y': test_y,\n 'y_predict': inverse_predictions\n }\n })\n\n range_results({\n 'xgb_predictions': inverse_predictions,\n }, test_y)",
"def __init__(self, sess, max_iter=50001, optim='adagrad', learning_rate=1e-2,\n d_per_iter=1, g_per_iter=2, d_update=True, g_update=True,\n real_n=1000, real_dim=2, fake_n=1000, z_dim=3, g_out_dim=2,\n g_layers_depth=5, g_layers_width=None, g_activations=None,\n d_out_dim=1, d_layers_depth=5, d_layers_width=5,\n d_activations=None, d_batch_size=25, x_lims=None, y_lims=None,\n grid_gran=21, grid_n=None, dataset='gaussian', expt='test_low_alpha'):\n self.sess = sess\n self.max_iter = max_iter\n self.optim = optim\n self.learning_rate = learning_rate\n\n self.d_per_iter = d_per_iter \n self.g_per_iter = g_per_iter\n self.d_update = d_update\n self.g_update = not d_update \n\n self.real_n = real_n \n self.real_dim = real_dim \n self.fake_n = fake_n\n\n self.z_dim = z_dim\n self.g_out_dim = g_out_dim\n self.g_layers_depth = g_layers_depth\n self.g_layers_width = [[5]] * (g_layers_depth - 1) + [[g_out_dim]]\n self.g_activations = [tf.nn.tanh, tf.nn.elu]\n\n self.d_out_dim = d_out_dim\n self.d_layers_depth = d_layers_depth\n self.d_layers_width = d_layers_width\n self.d_activations = [tf.nn.tanh, tf.nn.relu]\n self.d_batch_size = d_batch_size\n\n self.x_lims = [-6., 2.]\n self.y_lims = [-2., 6.]\n self.grid_gran = grid_gran\n self.grid_n = grid_gran ** 2\n self.grid, self.x_grid, self.y_grid = self.make_grid()\n\n self.dataset = dataset \n self.real_points = load_2d_data(dataset, real_n, real_dim)\n\n self.expt = expt\n\n self.build_model()",
"def trainer(model, X_train, y_train, X_valid, y_valid, config):\n # loop for number of epochs\n # shuffle inputs based off seed\n # need to shuffle validation based off same seed\n # forward prop and get xenloss\n # backprop and update weights\n\n stop_count = config['early_stop_epoch']\n b_size = config[\"batch_size\"]\n stop = config['early_stop']\n\n xnloss = []\n val_loss = [float('inf')]\n test_scores = []\n\n train_accu = []\n valid_accu = []\n\n\n #validation loss increase per epoch counter\n c = -1\n \n for i in range(config[\"epochs\"]):\n np.random.seed(i)\n np.random.shuffle(X_train)\n\n np.random.seed(i)\n np.random.shuffle(y_train)\n\n '''You should average the loss across all mini batches'''\n #means sum up loss from all mini-batches and divide by num_batches\n sums = 0\n\n num_batches = int(X_train.shape[0] / b_size)\n k=0\n for j in range(num_batches):\n # choose minibatch\n x = X_train[j * b_size: (j+1) * b_size]\n targets = y_train[j * b_size: (j+1) * b_size]\n loss, y_pred = model.forward_pass(x, targets)\n loss = loss / (config['batch_size'] * 10) # 10 classes\n sums += loss\n #xnloss.append(loss)\n model.backward_pass()\n k +=1\n # if k < 5 or k > 44:\n # print(targets[0, :])\n # print(y_pred[0, :])\n # print(y_pred[0, :].sum())\n # print(k, '=============')\n\n # mini-batch done here, take avg of loss\n avg_loss = sums / num_batches\n xnloss.append(avg_loss)\n \n ''' epochs loop continues here\n 0) perform validation and compute its (val) loss\n\n 1) calculate test accuracy for every epoch where the\n validation loss is better than the previous validation loss.\n \n 2) Save this result (test score OR loss?) and choose the best \n one when you hit the early stopping criteria.\n\n 3) early stopping - stop training (epochs loop) after 5th consecutive \n increase in validation loss. (Experiment with diff values).\n '''\n\n '''VALIDATION PERFORMACE'''\n v_loss, v_pred = model.forward_pass(X_valid, y_valid)\n v_loss_norm = v_loss / (len(X_valid) * 10)\n\n\n '''TEST ACCURACY''' \n #if val loss better (less) than prev: calculate test scores\n \n if v_loss_norm > val_loss[-1]:\n print(\"val loss going up from last time at epoch i=\", i)\n c += 1\n else:\n c = 0\n '''insert code for test accu here'''\n # val_loss.append(v_loss_norm)\n # else: #else val loss increased, so increment counter\n \n val_loss.append(v_loss_norm)\n \n '''EARLY STOPPING'''\n if stop and c == stop_count:\n print(\"early stopped at epoch =\", i+1)\n break\n\n print(val_loss[1:3])\n print(val_loss, len(xnloss), len(val_loss[1:]))\n #outside of epochs loop\n plt.plot(xnloss, label='training loss')\n plt.plot(val_loss[1:], label='validation loss')\n plt.title(\"losses across all epochs\")\n plt.xlabel(\"epochs\")\n plt.ylabel(\"avg loss for the epoch\")\n plt.legend()\n plt.savefig('raised_a.png')\n plt.show()\n #firstplot.png is training loss against # of batches, in 1 epoch\n #avgacrossepochs.png is avg training loss of all batches, across 50 epochs\n # both_losses = []\n \n # for i in range(len(xnloss)):\n # both_losses.append((val_loss[i], xnloss[i]))\n # print(\"validation errors: \", [(val_loss[i], xnloss[i]) for i in range(len(xnloss))])",
"def AddTrainingOperators(model, predict, label, value, value_label, base_lr=-0.003):\n xent = model.LabelCrossEntropy([predict, label], 'xent')\n # compute the expected loss\n loss1 = model.AveragedLoss(xent, \"loss1\")\n loss2 = model.Sub([value, value_label], \"loss2\")\n # track the accuracy of the model\n AddAccuracy(model, predict, label)\n # use the average loss we just computed to add gradient operators to the model\n model.AddGradientOperators([loss1, loss2])\n # do a simple stochastic gradient descent\n ITER = brew.iter(model, \"iter\")\n # set the learning rate schedule\n LR = model.LearningRate(ITER, \"LR\", base_lr=base_lr, policy=\"fixed\") # when policy=fixed, stepsize and gamma are ignored\n # ONE is a constant value that is used in the gradient update. We only need\n # to create it once, so it is explicitly placed in param_init_net.\n ONE = model.param_init_net.ConstantFill([], \"ONE\", shape=[1], value=1.0)\n # Now, for each parameter, we do the gradient updates.\n for param in model.params:\n # Note how we get the gradient of each parameter - ModelHelper keeps\n # track of that.\n param_grad = model.param_to_grad[param]\n # The update is a simple weighted sum: param = param + param_grad * LR\n model.WeightedSum([param, ONE, param_grad, LR], param)",
"def train_model(data_dir, rows):\n X, y = read_vectorized_features(data_dir, rows)\n\n # Set params\n # Scores ~0.784 (without tuning and early stopping)\n params = {'boosting_type': 'gbdt',\n 'max_depth' : -1,\n 'objective': 'binary',\n 'nthread': 3, # Updated from nthread\n 'num_leaves': 64,\n 'learning_rate': 0.05,\n 'max_bin': 512,\n 'subsample_for_bin': 200,\n 'subsample': 1,\n 'subsample_freq': 1,\n 'colsample_bytree': 0.8,\n 'reg_alpha': 5,\n 'reg_lambda': 10,\n 'min_split_gain': 0.5,\n 'min_child_weight': 1,\n 'min_child_samples': 5,\n 'scale_pos_weight': 1,\n 'num_class' : 1,\n 'metric' : 'binary_error'}\n\n # Create parameters to search\n gridParams = {\n 'learning_rate': [0.15, 0.2, 0.25, 0.3], #default = 0.1\n 'n_estimators': [40],\n 'num_leaves': [6,8,12,16],\n 'boosting_type' : ['gbdt'],\n 'objective' : ['binary'],\n 'random_state' : [501], # Updated from 'seed'\n 'colsample_bytree' : [0.65, 0.66],\n 'subsample' : [0.7,0.75],\n 'reg_alpha' : [1,1.2],\n 'reg_lambda' : [1,1.2,1.4],\n }\n\n # Create classifier to use. Note that parameters have to be input manually\n # not as a dict!\n mdl = lgb.LGBMClassifier(boosting_type= 'gbdt',\n objective = 'binary',\n n_jobs = 3, # Updated from 'nthread'\n silent = True,\n max_depth = params['max_depth'],\n max_bin = params['max_bin'],\n subsample_for_bin = params['subsample_for_bin'],\n subsample = params['subsample'],\n subsample_freq = params['subsample_freq'],\n min_split_gain = params['min_split_gain'],\n min_child_weight = params['min_child_weight'],\n min_child_samples = params['min_child_samples'],\n scale_pos_weight = params['scale_pos_weight'])\n\n # Create the grid\n grid = GridSearchCV(mdl, gridParams,\n verbose=0,\n cv=4,\n n_jobs=2)\n # train\n grid.fit(X, y)\n print(grid.best_params_)\n print(grid.best_score_)\n\n\n # train\n lgbm_dataset = lgb.Dataset(X, y)\n lgbm_model = lgb.train({\"application\": \"binary\"}, lgbm_dataset)\n\n return lgbm_model",
"def objective(params, n_folds=N_FOLDS):\n\n # Keep track of evals\n global ITERATION\n\n ITERATION += 1\n\n # Retrieve the subsample if present otherwise set to 1.0\n subsample = params['boosting_type'].get('subsample', 1.0)\n\n # Extract the boosting type\n params['boosting_type'] = params['boosting_type']['boosting_type']\n params['subsample'] = subsample\n\n # Make sure parameters that need to be integers are integers\n for parameter_name in ['max_depth', 'subsample_for_bin', 'min_child_samples','min_child_weight','num_parallel_tree']:\n params[parameter_name] = int(params[parameter_name])\n\n start = timer()\n\n print('params',params)\n # Perform n_folds cross validation\n cv_results = xgb.cv(params, train_set,\n num_boost_round=3000,\n nfold=n_folds,\n stratified=True,\n early_stopping_rounds=100,\n feval=tpr_weight_funtion_xgb_cv,\n seed=50,\n verbose_eval=True,\n\n )\n\n print('cv_results\\n',type(cv_results),'\\n',cv_results)\n\n run_time = timer() - start\n\n # Extract the best score\n best_score = np.min(cv_results['test-TPR-mean'])\n\n # Loss must be minimized\n loss = best_score\n\n TPR_std = cv_results[cv_results['test-TPR-mean']==best_score]['test-TPR-std'].values[0]\n print('TPR_stdv', TPR_std)\n\n\n # Boosting rounds that returned the highest cv score\n n_estimators = int(np.argmin(cv_results['test-TPR-mean']) + 1)\n\n # Write to the csv file ('a' means append)\n of_connection = open(out_file, 'a')\n writer = csv.writer(of_connection)\n writer.writerow([loss,TPR_std, params, ITERATION, n_estimators, run_time])\n\n # Dictionary with information for evaluation\n return {'loss': loss,'TPR_std':TPR_std, 'params': params, 'iteration': ITERATION,\n 'estimators': n_estimators,\n 'train_time': run_time, 'status': STATUS_OK}",
"def __init__(self, b0, b1, learn_rate, epochs, X, Y):\n self.b0 = b0\n self.b1 = b1\n self.learning_rate = learn_rate\n self.epochs = epochs\n self.accuracy = 0\n self.GD_method(X, Y)",
"def train(self, rng):\n # pylint: disable=possibly-unused-variable\n rng, init_key = jax.random.split(rng)\n params = self.model.init(init_key, jnp.zeros([self.model.S_dim]))\n opt_state = self.tx.init(params)\n start_time = time.time()\n\n count_since_reset = 0\n all_metrics = []\n try:\n i = 0\n while i < self.num_steps:\n rng, key = jax.random.split(rng)\n # Pass the inputs in and take a gradient step.\n opt_state, params, metrics, grads, bad = self.opt_step(\n opt_state, params, key)\n all_metrics.append(jax.tree_map(np.array, metrics))\n if bad:\n finished_reason = \"nan\"\n return types.SimpleNamespace(**locals())\n\n count_since_reset += 1\n if i % self.print_every == 0 or np.remainder(np.log2(i), 1) == 0:\n now = time.time()\n rate = count_since_reset / (now - start_time)\n start_time = now\n count_since_reset = 0\n print(f\"{i} [{rate}/s]:\", jax.tree_map(float, metrics))\n sys.stdout.flush()\n time.sleep(0.02)\n i += 1\n\n except KeyboardInterrupt:\n finished_reason = \"interrupt\"\n return types.SimpleNamespace(**locals())\n\n finished_reason = \"done\"\n (opt_state, params) = jax.tree_map(np.array, (opt_state, params))\n return types.SimpleNamespace(**locals())\n # pylint: enable=possibly-unused-variable",
"def main():\n df = prepro_last()\n X, y = train_build(df)\n fit_store(X, y)",
"def train_and_eval():\n # train_file_name = 'adult.data'\n # test_file_name = 'adult.test'\n train_file_name = 'poker-hand-testing.data'\n test_file_name = 'poker-hand-training-true.data'\n #test_file_name = maybe_download()\n df_train = pd.read_csv(\n tf.gfile.Open(\"/opt/tensor/race_result_clean.csv\"),\n names=COLUMNS,\n skipinitialspace=True,\n skiprows=1)\n df_test = pd.read_csv(\n tf.gfile.Open(\"/opt/tensor/race_result_clean.csv\"),\n names=COLUMNS,\n skipinitialspace=True,\n skiprows=1)\n\n #df_train[LABEL_COLUMN] = (df_train[\"CLASS_Poker_Hand\"].apply(lambda x: x>5)).astype(int)\n #df_test[LABEL_COLUMN] = (df_test[\"CLASS_Poker_Hand\"].apply(lambda x: x>5)).astype(int)\n\n model_dir = tempfile.mkdtemp() if not FLAGS.model_dir else FLAGS.model_dir\n print(\"model directory = %s\" % model_dir)\n m = build_estimator(model_dir)\n print(m)\n m.fit(input_fn=lambda: input_fn(df_train), steps=FLAGS.train_steps)\n results = m.evaluate(input_fn=lambda: input_fn(df_test), steps=1)\n for key in sorted(results):\n print(\"%s: %s\" % (key, results[key]))",
"def define_parameters(act_func, dropout, fc_layer_units, labels, samples):\n\n\n\n # Define a training and test set\n test_size = 0.1 # training is set on 90%\n training_vec, test_vec, training_labels, test_labels = train_test_split(samples, labels, test_size=test_size)\n\n # Get the batch size\n batch_percentage = 0.1 # There is chosen to use a batch size of 10%\n batch_size = int(training_vec.shape[0] * batch_percentage)\n\n # Get the number of features\n feature_number = training_vec.shape[1]\n\n # Get the number of classes\n class_number = len(np.unique(labels))\n\n # Get the layer nodes in correct format\n int_layer_units = []\n units = fc_layer_units.split(',')\n for unit in units:\n int_layer_units.append(int(unit))\n\n # Get the dropout layers in correct format\n dropout_booleans = []\n dropouts = dropout.split(',')\n for layer in dropouts:\n layer = layer.lower()\n if layer == 'f' or layer == 'false':\n dropout_booleans.append(False)\n else:\n dropout_booleans.append(True)\n\n # Get the layer names of the neural network architecture\n layers = []\n for index, nodes in enumerate(int_layer_units):\n layers.append('fc ({})'.format(nodes))\n if dropout_booleans[index]:\n layers.append('do')\n layers = ' - '.join(layers)\n\n # Get the right activation function\n act_func = act_func.lower()\n if act_func == 'sigmoid' or act_func == 'sig' or act_func == 's':\n act_func = tf.nn.sigmoid\n act_title = 'sigmoid'\n elif act_func == 'relu' or act_func == 'r':\n act_func = tf.nn.relu\n act_title = 'ReLU'\n elif act_func == 'tanh' or act_func == 'tan' or act_func == 't':\n act_func = tf.tanh\n act_title = 'tanH'\n else:\n act_func = None\n act_title = 'none'\n\n return act_func, act_title, batch_size, class_number, feature_number, layers, dropout_booleans, int_layer_units, \\\n test_labels, test_vec, training_labels, training_vec",
"def train_step(model, x, optimizer):\n with tf.GradientTape() as tape:\n loss = compute_loss(model, x)\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))",
"def _learn_using_GD(self, y, tx, w, fn, gamma, lambda_, regularization):\n loss, grad = fn(y, tx, w, lambda_)\n loss, grad = self.apply_regularization(w, loss, grad, regularization, lambda_, tx.shape[0])\n w = w - gamma * grad\n return loss, w",
"def tuned_for_ec():\n # TODO(theosanderson): update these to true SOTA values\n hparams = contrib_training.HParams()\n hparams.add_hparam('gradient_clipping_decay', 0.9999)\n hparams.add_hparam('batch_style', 'bucket')\n hparams.add_hparam('batch_size', 34)\n hparams.add_hparam('dilation_rate', 5)\n hparams.add_hparam('filters', 411)\n hparams.add_hparam('first_dilated_layer', 1) # This is 0-indexed\n hparams.add_hparam('kernel_size', 7)\n hparams.add_hparam('num_layers', 5)\n hparams.add_hparam('pooling', 'mean')\n hparams.add_hparam('resnet_bottleneck_factor', 0.88152)\n hparams.add_hparam('lr_decay_rate', 0.9977)\n hparams.add_hparam('learning_rate', 0.00028748)\n hparams.add_hparam('decision_threshold', 0.3746)\n hparams.add_hparam('denominator_power', 0.88)\n\n hparams.add_hparam('train_steps', 650000)\n return hparams",
"def main():\n\n # choose number of data-points and sample a pair of vectors: the input\n # values and the corresponding target values\n N = 500\n inputs, targets = sample_data(N, arbitrary_function_2, seed=1)\n\n # specify the centres and scale of some rbf basis functions\n default_centres = np.linspace(0,1,21)\n default_scale = 0.03\n default_reg_param = 0.08\n\n # get the cross-validation folds\n num_folds = 4\n folds = create_cv_folds(N, num_folds)\n\n # evaluate then plot the performance of different reg params\n evaluate_reg_param(inputs, targets, folds, default_centres, default_scale)\n # evaluate then plot the performance of different scales\n evaluate_scale(inputs, targets, folds, default_centres, default_reg_param)\n # evaluate then plot the performance of different numbers of basis\n # function centres.\n evaluate_num_centres(\n inputs, targets, folds, default_scale, default_reg_param)\n\n plt.show()",
"def evaluate(cfg: DictConfig):\n\n # suppress TensorFlow and DALI warnings\n suppress_warnings()\n\n if cfg.USE_MULTI_GPUS.VALUE:\n # change number of visible gpus for evaluation\n set_gpus(cfg.USE_MULTI_GPUS.GPU_IDS)\n # update batch size according to available gpus\n data_generator.update_batch_size(cfg)\n\n if cfg.OPTIMIZATION.AMP:\n print(\"Enabling Automatic Mixed Precision(AMP) training\")\n policy = mixed_precision.Policy('mixed_float16')\n mixed_precision.set_global_policy(policy)\n\n if cfg.OPTIMIZATION.XLA:\n print(\"Enabling Automatic Mixed Precision(XLA) training\")\n tf.config.optimizer.set_jit(True)\n\n # create model\n strategy = None\n if cfg.USE_MULTI_GPUS.VALUE:\n # multi gpu training using tensorflow mirrored strategy\n strategy = tf.distribute.MirroredStrategy(\n cross_device_ops=tf.distribute.HierarchicalCopyAllReduce()\n )\n print('Number of visible gpu devices: {}'.format(strategy.num_replicas_in_sync))\n with strategy.scope():\n optimizer = tf.keras.optimizers.Adam(\n learning_rate=cfg.HYPER_PARAMETERS.LEARNING_RATE\n ) # optimizer\n if cfg.OPTIMIZATION.AMP:\n optimizer = mixed_precision.LossScaleOptimizer(\n optimizer,\n dynamic=True\n )\n dice_coef = DiceCoefficient(post_processed=True, classes=cfg.OUTPUT.CLASSES)\n dice_coef = tf.keras.metrics.MeanMetricWrapper(name=\"dice_coef\", fn=dice_coef)\n model = prepare_model(cfg, training=True)\n else:\n optimizer = tf.keras.optimizers.Adam(\n learning_rate=cfg.HYPER_PARAMETERS.LEARNING_RATE\n ) # optimizer\n if cfg.OPTIMIZATION.AMP:\n optimizer = mixed_precision.LossScaleOptimizer(\n optimizer,\n dynamic=True\n )\n dice_coef = DiceCoefficient(post_processed=True, classes=cfg.OUTPUT.CLASSES)\n dice_coef = tf.keras.metrics.MeanMetricWrapper(name=\"dice_coef\", fn=dice_coef)\n model = prepare_model(cfg, training=True)\n\n model.compile(\n optimizer=optimizer,\n loss=unet3p_hybrid_loss,\n metrics=[dice_coef],\n )\n\n # weights model path\n checkpoint_path = join_paths(\n cfg.WORK_DIR,\n cfg.CALLBACKS.MODEL_CHECKPOINT.PATH,\n f\"{cfg.MODEL.WEIGHTS_FILE_NAME}.hdf5\"\n )\n\n assert os.path.exists(checkpoint_path), \\\n f\"Model weight's file does not exist at \\n{checkpoint_path}\"\n\n # TODO: verify without augment it produces same results\n # load model weights\n model.load_weights(checkpoint_path, by_name=True, skip_mismatch=True)\n model.summary()\n\n # data generators\n val_generator = data_generator.get_data_generator(cfg, \"VAL\", strategy)\n validation_steps = data_generator.get_iterations(cfg, mode=\"VAL\")\n\n # evaluation metric\n evaluation_metric = \"dice_coef\"\n if len(model.outputs) > 1:\n evaluation_metric = f\"{model.output_names[0]}_dice_coef\"\n\n result = model.evaluate(\n x=val_generator,\n steps=validation_steps,\n workers=cfg.DATALOADER_WORKERS,\n return_dict=True,\n )\n\n # return computed loss, validation accuracy, and it's metric name\n return result, evaluation_metric",
"def train(self, sess, observations, action_gradients, is_training=True):\n sess.run(self.optim, {\n self.observations: observations,\n self.action_gradients: action_gradients,\n self.is_training: is_training\n })",
"def model(X_train, Y_train, X_test, Y_test, num_iterations, learning_rate, print_cost):\n\n\n # initialize parameters with zeros\n w, b = initialize_with_zeros(X_train.shape[0]);\n\n print(\"w.shape() = \" +str(w.shape)+ \", b = \" +str(b));\n\n # Gradient descent\n parameters, grads, costs = optimize(w, b, X_train, Y_train, num_iterations, learning_rate, print_cost);\n \n # Retrieve parameters w and b from dictionary \"parameters\"\n w = parameters[\"w\"]\n b = parameters[\"b\"]\n \n # Predict test/train set examples \n Y_prediction_test = predict(w, b, X_test)\n Y_prediction_train = predict(w, b, X_train)\n\n # Print train/test Errors\n print(\"train accuracy: {} %\".format(100 - np.mean(np.abs(Y_prediction_train - Y_train)) * 100))\n print(\"test accuracy: {} %\".format(100 - np.mean(np.abs(Y_prediction_test - Y_test)) * 100))\n \n d = {\"costs\": costs,\n \"Y_prediction_test\": Y_prediction_test, \n \"Y_prediction_train\" : Y_prediction_train, \n \"w\" : w,\n \"b\" : b,\n \"learning_rate\" : learning_rate,\n \"num_iterations\": num_iterations}\n \n return d",
"def train_naive(): # add arguments as needed\n pass",
"def __init__(self, activation_function=Function(), input_size=1, output_size=1, noise_size=0,\n learning_batch_size=1, param_desc='Parametres de descente', nb_exp=0):\n self._input_size = input_size\n self._output_size = output_size\n self._learning_batch_size = learning_batch_size\n self._noise_size = noise_size\n # self._weights = np.transpose(np.random.randn(input_size, output_size))\n self._weights = np.random.randn(output_size, input_size+noise_size)\n self._bias = np.zeros((output_size, 1)) # Vecteur colonne\n # On peut laisser le biais comme un vecteur colonne, car en faire une matrice contenant\n # learning_batch_size fois la même colonne. Lorsque l'on aura besoin du biais dans les\n # calculs, il y aura mathématiquement parlant un problème de dimension (addition vecteur\n # + matrice), cependant numpy gère ça en additionnant le vecteur de biais à chacune des\n # colonnes de la matrice (broadcast)\n self.input = np.zeros((input_size, learning_batch_size))\n self._activation_function = activation_function\n self._activation_function.vectorize()\n self.activation_levels = np.zeros((output_size, learning_batch_size)) # Chaque colonne\n # correspond à une entrée du batch\n self.output = np.zeros((output_size, learning_batch_size)) # Chaque colonne\n # correspond à une entrée du batch\n\n self.update_weights_value = np.zeros((output_size, input_size + noise_size))\n self.update_bias_value = np.zeros((output_size, 1))\n\n self.noise_input = np.zeros((noise_size, learning_batch_size))\n\n # self.update_weights_value = np.zeros((output_size, input_size))\n\n self.weights_gradients_sum = np.zeros((output_size, input_size + noise_size))\n # self.weights_gradients_sum = np.zeros((output_size, input_size))\n self.bias_gradients_sum = np.zeros((output_size, 1))\n self.weights_moment = np.zeros((output_size, input_size + noise_size))\n # self.weights_moment = np.zeros((output_size, input_size))\n self.bias_moment = np.zeros((output_size, 1))\n self.weights_eta = np.zeros((output_size, input_size + noise_size))\n # self.weights_eta = np.zeros((output_size, input_size)) # need meilleur nom\n self.bias_eta = np.zeros((output_size, 1)) # need meilleur nom\n\n data_interface = DataInterface()\n param_liste = data_interface.read_conf('config_algo_descente.ini', param_desc) # Lecture\n # du fichier de config\n param_liste = data_interface.extract_param(param_liste, nb_exp)\n self.algo_utilise = param_liste['algo_utilise']\n self.eta = param_liste['eta']\n self.momentum = param_liste['momentum']\n self.epsilon = param_liste['epsilon']\n self.gamma = param_liste['gamma']\n self.moment = param_liste['moment']\n self.eta = param_liste['eta']\n self.gamma_1 = param_liste['gamma_1']\n self.gamma_2 = param_liste['gamma_2']\n self.instant = 0",
"def main(args, base_dir):\n for i in range(args.n_training):\n # value of the next seed\n seed = args.seed + i\n\n # The time when the current experiment started.\n now = strftime(\"%Y-%m-%d-%H:%M:%S\")\n\n # Create a save directory folder (if it doesn't exist).\n if args.log_dir is not None:\n dir_name = args.log_dir\n else:\n dir_name = os.path.join(base_dir, '{}/{}'.format(\n args.env_name, now))\n ensure_dir(dir_name)\n\n # Get the policy class.\n if args.alg == \"TD3\":\n from hbaselines.multiagent.td3 import MultiFeedForwardPolicy\n elif args.alg == \"SAC\":\n from hbaselines.multiagent.sac import MultiFeedForwardPolicy\n elif args.alg == \"PPO\":\n from hbaselines.multiagent.ppo import MultiFeedForwardPolicy\n elif args.alg == \"TRPO\":\n from hbaselines.multiagent.trpo import MultiFeedForwardPolicy\n else:\n raise ValueError(\"Unknown algorithm: {}\".format(args.alg))\n\n # Get the hyperparameters.\n hp = get_hyperparameters(args, MultiFeedForwardPolicy)\n\n # add the seed for logging purposes\n params_with_extra = hp.copy()\n params_with_extra['seed'] = seed\n params_with_extra['env_name'] = args.env_name\n params_with_extra['policy_name'] = \"MultiFeedForwardPolicy\"\n params_with_extra['algorithm'] = args.alg\n params_with_extra['date/time'] = now\n\n # Add the hyperparameters to the folder.\n with open(os.path.join(dir_name, 'hyperparameters.json'), 'w') as f:\n json.dump(params_with_extra, f, sort_keys=True, indent=4)\n\n run_exp(\n env=args.env_name,\n policy=MultiFeedForwardPolicy,\n hp=hp,\n dir_name=dir_name,\n evaluate=args.evaluate,\n seed=seed,\n eval_interval=args.eval_interval,\n log_interval=args.log_interval,\n save_interval=args.save_interval,\n initial_exploration_steps=args.initial_exploration_steps,\n ckpt_path=args.ckpt_path,\n )",
"def fit(self):\n\n # Initial values for geco algorithm\n if self.params[\"networks\"][\"variational\"]:\n self.langrange_multiplier = self.params[\"geco\"][\"initial_lagrange_multiplier\"]\n self.C_ma = None\n\n # TRAIN\n for ep in range(self.params[\"optimization\"][\"epochs\"]):\n print(\"Epoch %s / %s\" % (str(ep + 1), str(self.params[\"optimization\"][\"epochs\"])))\n pbar = tqdm.tqdm(self.train_data_loader)\n for batch_idx, rollout_batch in enumerate(pbar):\n # Move to device and change dtype\n rollout_batch = rollout_batch.to(self.device).type(self.dtype)\n\n # Do an optimization step\n losses, prediction = self.training_step(rollouts=rollout_batch)\n\n # Log progress\n self.training_logger.step(losses=losses,\n rollout_batch=rollout_batch,\n prediction=prediction,\n model=self.hgn)\n\n # Progress-bar msg\n msg = \", \".join([\n f\"{k}: {v:.2e}\" for k, v in losses.items() if v is not None\n ])\n pbar.set_description(msg)\n # Save model\n self.hgn.save(self.model_save_file)\n\n self.test()\n return self.hgn",
"def train(self,features,y):\r\n \r\n if self.learn_type == \"nn\":\r\n #generate supervised dataset\r\n return(self.learner.train_on_batch(features,y))\r\n elif self.learn_type == \"linear\":\r\n grad = 0\r\n n = len(features)\r\n for i in range(n):\r\n #sum over the instances to get an estimate of the gradient\r\n print((y[i] - self.learner.activate(features[i])))\r\n grad -= (y[i] - self.learner.activate(features[i])) * \\\r\n self.learner.grad(features[i])\r\n grad /= n\r\n #update paramter\r\n param = np.copy(self.learner.param)\r\n self.learner.param = param - self.alpha * grad\r\n #print(self.learner.param)\r",
"def train_and_score_xgb(network):\n\n df_all_train_x = pd.read_pickle('data/df_all_train_x.pkl.gz', compression='gzip')\n df_all_train_y = pd.read_pickle('data/df_all_train_y.pkl.gz', compression='gzip')\n df_all_train_actuals = pd.read_pickle('data/df_all_train_actuals.pkl.gz', compression='gzip')\n df_all_test_x = pd.read_pickle('data/df_all_test_x.pkl.gz', compression='gzip')\n df_all_test_y = pd.read_pickle('data/df_all_test_y.pkl.gz', compression='gzip')\n df_all_test_actuals = pd.read_pickle('data/df_all_test_actuals.pkl.gz', compression='gzip')\n\n train_y = df_all_train_y[0].values\n train_actuals = df_all_train_actuals[0].values\n train_log_y = safe_log(train_y)\n train_x = df_all_train_x.values\n test_actuals = df_all_test_actuals.values\n test_y = df_all_test_y[0].values\n test_log_y = safe_log(test_y)\n test_x = df_all_test_x.values\n\n # Use keras model to generate x vals\n mae_intermediate_model = load_model('models/mae_intermediate_model.h5')\n\n mae_vals_train = mae_intermediate_model.predict(train_x)\n mae_vals_test = mae_intermediate_model.predict(test_x)\n\n # train = xgb.DMatrix(mae_vals_train, label=train_log_y)\n # test = xgb.DMatrix(mae_vals_test)\n\n model = compile_model(network)\n\n print('\\rNetwork')\n\n for property in network:\n print(property, ':', network[property])\n logging.info('%s: %s' % (property, network[property]))\n\n\n eval_set = [(mae_vals_test, test_log_y)]\n model.fit(mae_vals_train, train_log_y, early_stopping_rounds=5, eval_metric='mae', eval_set=eval_set)\n # , verbose=False)\n\n # eval_set = [(test, test_log_y)]\n # xgb.train(network, train, num_boost_round=5000, evals=eval_set, early_stopping_rounds=5)\n\n\n predictions = model.predict(mae_vals_test)\n # predictions = xgb.predict(test)\n score = mean_absolute_error(test_log_y, predictions)\n\n print('\\rResults')\n\n best_round = model.best_iteration\n # best_round = xgb.best_iteration\n\n if np.isnan(score):\n score = 9999\n\n print('best round:', best_round)\n print('loss:', score)\n print('-' * 20)\n\n logging.info('best round: %d' % best_round)\n logging.info('loss: %.4f' % score)\n logging.info('-' * 20)\n\n return score",
"def train_and_eval(model_dir, model_type, train_steps, train_data, test_data, train_embeddings_file_name, test_embeddings_file_name, positive_labels, combination_method, method):\n \n index_map, weights = wvd.load(train_embeddings_file_name)\n #Get positive labels\n positive_labels = positive_labels.split(',')\n \n print(\"reading data...\")\n train_file_name = train_data \n df_train = pd.read_table(train_file_name, dtype={'node1':str, 'node2':str})\n df_train = df_train.sample(frac=1)\n\n # remove NaN elements\n df_train = df_train.dropna(how='any', axis=0)\n \n df_train[LABEL_COLUMN] = (\n df_train[\"label\"].apply(lambda x: label_func(x, positive_labels))).astype(int)\n\n model_dir = tempfile.mkdtemp() if not model_dir else model_dir\n print(\"model directory = %s\" % model_dir)\n \n train_x, _, train_y, _ = get_input(df_train, weights, index_map, combination_method)\n \n print(\"\\nBuilding model...\")\n m = build_estimator(model_dir, model_type, weights, index_map, combination_method)\n \n print(\"\\nTraining model...\")\n if model_type == \"regressor\":\n m.fit(train_x, train_y, n_epoch=train_steps, show_metric=True, snapshot_epoch=False)\n \n print(\"\\nTesting model...\")\n index_map, weights = wvd.load(test_embeddings_file_name)\n \n print(\"reading data...\")\n test_file_name = test_data\n df_test = pd.read_table(test_file_name, dtype={'node1':str, 'node2':str})\n df_test = df_test.sample(frac=1)\n\n # remove NaN elements\n df_test = df_test.dropna(how='any', axis=0)\n \n df_test[LABEL_COLUMN] = (\n df_test[\"label\"].apply(lambda x: label_func(x, positive_labels))).astype(int)\n \n if model_type == \"regressor\":\n test_x, test_original_y, test_index_y, test_original_x = get_input(df_test, weights, index_map, combination_method, data_purpose='test')\n node_sets = get_node_sets(test_original_x, test_original_y)\n \n print(\"\\nPredicting:\")\n model_predictions = m.predict(test_x)\n model_predictions = list(model_predictions)\n #Covert back to 1 and 0\n predictions = []\n model_predictions_probs = []\n for prediction in model_predictions:\n predictions.append(prediction[1]) #non-thresholded value of positve class\n model_predictions_probs.append(prediction[1])\n \n k = int(len([i for i in test_original_y if i == 1]) * 0.3)\n do_evaluations([x for x in test_original_x], [y for y in test_original_y], [p for p in predictions], k, node_sets, \n positive_labels, model=m, weights=weights, index_map=index_map, combination_method=combination_method)\n #Uncomment to log ranked links\n #log_predictions([x for x in test_original_x], [y for y in test_original_y], [p for p in predictions], k, node_sets, \n # positive_labels, model=m, weights=weights, index_map=index_map, combination_method=combination_method,\n # outfilename=combination_method, method=method)",
"def main(cfg, logger):\n\n # Initialize parameters\n model_selection_metric = cfg['train']['model_selection_metric']\n\n if cfg['train']['model_selection_mode'] == 'maximize':\n model_selection_sign = 1\n elif cfg['train']['model_selection_mode'] == 'minimize':\n model_selection_sign = -1\n else:\n raise ValueError(\n 'model_selection_mode must be either maximize or minimize.')\n\n # Get data loader\n train_loader = make_data_loader(cfg, phase='train')\n val_loader = make_data_loader(cfg, phase='val')\n\n # Set up tensorboard logger\n tboard_logger = SummaryWriter(os.path.join(cfg['misc']['log_dir'], 'logs'))\n\n # Get model\n model = config.get_model(cfg)\n\n # Get optimizer and trainer\n optimizer = getattr(optim, cfg['optimizer']['alg'])(model.parameters(), lr=cfg['optimizer']['learning_rate'],\n weight_decay=cfg['optimizer']['weight_decay'])\n\n trainer = config.get_trainer(cfg, model, optimizer, tboard_logger)\n\n # Load pre-trained model if existing\n kwargs = {\n 'model': model,\n 'optimizer': optimizer,\n }\n\n checkpoint_io = CheckpointIO(cfg['misc']['log_dir'], initialize_from=cfg['model']['init_from'],\n initialization_file_name=cfg['model']['init_file_name'], **kwargs)\n\n try:\n load_dict = checkpoint_io.load('model.pt')\n except FileExistsError:\n load_dict = dict()\n\n epoch_it = load_dict.get('epoch_it', -1)\n it = load_dict.get('it', -1)\n\n metric_val_best = load_dict.get(\n 'loss_val_best', -model_selection_sign * np.inf)\n\n if metric_val_best == np.inf or metric_val_best == -np.inf:\n metric_val_best = -model_selection_sign * np.inf\n\n logger.info('Current best validation metric ({}): {:.5f}'.format(\n model_selection_metric, metric_val_best))\n\n # Training parameters\n stat_interval = cfg['train']['stat_interval']\n stat_interval = stat_interval if stat_interval > 0 else abs(\n stat_interval * len(train_loader))\n\n chkpt_interval = cfg['train']['chkpt_interval']\n chkpt_interval = chkpt_interval if chkpt_interval > 0 else abs(\n chkpt_interval * len(train_loader))\n\n val_interval = cfg['train']['val_interval']\n val_interval = val_interval if val_interval > 0 else abs(\n val_interval * len(train_loader))\n\n # Print model parameters and model graph\n nparameters = sum(p.numel() for p in model.parameters())\n # print(model)\n logger.info('Total number of parameters: {}'.format(nparameters))\n\n # Training loop\n while epoch_it < cfg['train']['max_epoch']:\n epoch_it += 1\n\n for batch in train_loader:\n it += 1\n loss = trainer.train_step(batch, it)\n tboard_logger.add_scalar('train/loss', loss, it)\n\n # Print output\n if stat_interval != 0 and (it % stat_interval) == 0 and it != 0:\n logger.info('[Epoch {}] it={}, loss={:.4f}'.format(\n epoch_it, it, loss))\n\n # Save checkpoint\n if (chkpt_interval != 0 and (it % chkpt_interval) == 0) and it != 0:\n logger.info('Saving checkpoint')\n checkpoint_io.save('model.pt', epoch_it=epoch_it, it=it,\n loss_val_best=metric_val_best)\n\n # Run validation\n if val_interval != 0 and (it % val_interval) == 0 and it != 0:\n eval_dict = trainer.evaluate(val_loader, it)\n\n metric_val = eval_dict[model_selection_metric]\n logger.info('Validation metric ({}): {:.4f}'.format(\n model_selection_metric, metric_val))\n\n for k, v in eval_dict.items():\n tboard_logger.add_scalar('val/{}'.format(k), v, it)\n\n if model_selection_sign * (metric_val - metric_val_best) > 0:\n metric_val_best = metric_val\n logger.info(\n 'New best model (loss {:.4f})'.format(metric_val_best))\n checkpoint_io.save('model_best.pt', epoch_it=epoch_it, 
it=it,\n loss_val_best=metric_val_best)\n\n # Quit after the maximum number of epochs is reached\n logger.info('Training completed after {} Epochs ({} it) with best val metric ({})={}'.format(\n epoch_it, it, model_selection_metric, metric_val_best))",
"def training_step(self, **kwargs):\n raise NotImplementedError",
"def __init__(self,\n exp_name,\n ds_train,\n ds_val,\n epochs=210,\n batch_size=16,\n num_workers=4,\n loss='JointsMSELoss',\n lr=0.001,\n lr_decay=True,\n lr_decay_steps=(170, 200),\n lr_decay_gamma=0.1,\n optimizer='Adam',\n weight_decay=0.,\n momentum=0.9,\n nesterov=False,\n pretrained_weight_path=None,\n checkpoint_path=None,\n log_path='./logs',\n use_tensorboard=True,\n model_c=48,\n model_nof_joints=18,\n model_bn_momentum=0.1,\n flip_test_images=True,\n device=None\n ):\n super(GOLFTrain, self).__init__(\n exp_name=exp_name,\n ds_train=ds_train,\n ds_val=ds_val,\n epochs=epochs,\n batch_size=batch_size,\n num_workers=num_workers,\n loss=loss,\n lr=lr,\n lr_decay=lr_decay,\n lr_decay_steps=lr_decay_steps,\n lr_decay_gamma=lr_decay_gamma,\n optimizer=optimizer,\n weight_decay=weight_decay,\n momentum=momentum,\n nesterov=nesterov,\n pretrained_weight_path=pretrained_weight_path,\n checkpoint_path=checkpoint_path,\n log_path=log_path,\n use_tensorboard=use_tensorboard,\n model_c=model_c,\n model_nof_joints=model_nof_joints,\n model_bn_momentum=model_bn_momentum,\n flip_test_images=flip_test_images,\n device=device\n )",
"def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--dataset_path',\n type=str,\n default='../Datasets2/cifar100Dataset.npy',\n help='location of the dataset in numpy format', )\n parser.add_argument('--train_steps',\n type=int,\n default=150000,\n help='training steps', )\n parser.add_argument('--measuring_step_size',\n type=float,\n default=0.1,\n help='step size to where a second loss is determined to approximate the loss function '\n 'in the direction of the gradient by a parabola', )\n parser.add_argument('--momentum',\n type=float,\n default=0.4,\n help='momentum term', )\n parser.add_argument('--batch_size',\n type=int,\n default=128,\n help='batch_size', )\n parser.add_argument('--experiment_name',\n type=str,\n default=\"testmodel\",\n help='the name of the experiment', )\n parser.add_argument('--loose_approximation_factor',\n type=float,\n default=1.0,\n help='intentionally approximate the function with less or more curvature. = 1/ step size adaptation '\n 'less curvature <1 more curvature >1', )\n parser.add_argument('--train_data_size',\n type=int,\n default=45000,\n help='train data size,remaining elements define the evaluation_Res_Net set', )\n parser.add_argument('--random_seed',\n type=int,\n default=1,\n help='random number seed for numpy and tensorflow to get same results for multiple runs', )\n parser.add_argument('--max_stepsize',\n type=float,\n default=3.6,\n help='max stepsize in direction of the gradient', )\n parser.add_argument('--decay',\n type=float,\n default=1,\n help='max stepsize and measurment stepsize decay rate', )\n parser.add_argument('--additional',\n type=float,\n default=100,\n help='additional parameter', )\n parser.add_argument('--num_gpus',\n type=int,\n default=1,\n help='num gpus to train on', )\n parser.add_argument('--optimizer',\n type=str,\n default=\"SLS\",\n help='the optimizer to use', )\n\n FLAGS, unparsed = parser.parse_known_args()\n for k, v in vars(FLAGS).items():\n k, v = str(k), str(v)\n print('%s: %s' % (k, v))\n FLAGS.dataset_path = os.path.expanduser(FLAGS.dataset_path)\n print(\"DatasetPath: \" + str(FLAGS.dataset_path))\n\n workpath = os.path.dirname(os.path.dirname(sys.argv[0])) + '/' # double dir name to get parent\n\n print(\"workpath: \" + workpath)\n\n # check gpus\n local_device_protos = device_lib.list_local_devices()\n num_available_gpus = len([x.name for x in local_device_protos if x.device_type == 'GPU'])\n assert num_available_gpus >= FLAGS.num_gpus\n print(\"GPUs available: {1:d} \\t GPUs used: {1:d}\".format(num_available_gpus, FLAGS.num_gpus))\n\n learning_rate_pf = lambda global_step, learning_rate: tf.train.piecewise_constant(global_step,\n [75000.0, 112500.0],\n [float(learning_rate),\n float(learning_rate / 10),\n float(learning_rate / 100),\n ])\n\n if FLAGS.optimizer == \"PAL\":\n optimizer = PAL(None, FLAGS.measuring_step_size, FLAGS.momentum, FLAGS.loose_approximation_factor,\n FLAGS.max_stepsize, False)\n elif FLAGS.optimizer == \"SLS\":\n optimizer = SLS(n_batches_per_epoch=FLAGS.train_data_size // FLAGS.batch_size,\n init_step_size=FLAGS.measuring_step_size, c=FLAGS.momentum,\n beta_b=FLAGS.loose_approximation_factor, gamma=FLAGS.max_stepsize)\n elif FLAGS.optimizer == \"OL\":\n #optimizer = OptimalLineSearch(initial_search_step=FLAGS.measuring_step_size,\n # max_num_of_steps=FLAGS.max_stepsize, momentum=FLAGS.momentum)\n optimizer = OptimalLineSearch(initial_search_step=1.0,\n max_num_of_steps=20.0, momentum=0.0)\n\n elif FLAGS.optimizer == \"RMSP\":\n optimizer = 
TfOptimizer(tf.train.RMSPropOptimizer, learning_rate_pf,\n {\"learning_rate\": FLAGS.measuring_step_size, \"decay\": FLAGS.momentum,\n \"epsilon\": FLAGS.loose_approximation_factor})\n elif FLAGS.optimizer == \"ADAM\":\n optimizer = TfOptimizer(tf.train.AdamOptimizer, learning_rate_pf,\n {\"learning_rate\": FLAGS.measuring_step_size, \"beta1\": FLAGS.momentum,\n \"beta2\": FLAGS.loose_approximation_factor, \"epsilon\": FLAGS.max_stepsize})\n elif FLAGS.optimizer == \"SGD\":\n optimizer = TfOptimizer(tf.train.MomentumOptimizer, learning_rate_pf,\n {\"learning_rate\": FLAGS.measuring_step_size, \"momentum\": FLAGS.momentum,\n \"use_nesterov\": True})\n elif FLAGS.optimizer == \"SGDHD\":\n optimizer = TfOptimizer(SGDHD, None,\n {\"learning_rate\": FLAGS.measuring_step_size, \"hyper_gradient_learning_rate\": FLAGS.momentum})\n elif FLAGS.optimizer == \"ALIG\":\n optimizer = TfOptimizer(AliGwithMomentum, None,\n {\"max_lr\": FLAGS.measuring_step_size, \"momentum\": FLAGS.momentum})\n elif FLAGS.optimizer == \"COCOB\":\n optimizer = TfOptimizer(COCOB, None,\n {\"alpha\": FLAGS.measuring_step_size})\n\n else:\n raise ValueError(\"unknown optimizer flag:\" + FLAGS.optimizer)\n\n # Uncomment the network and dataset to use!\n\n # net_type= tolstoi_rnn.TolstoiRNN\n\n # net_type= simple_mnist_net.SimpleMnistNet\n\n # net_type=efficient_net_cifar10.EfficientNet\n # net_type=mobile_net_v2_cifar10.MobileNetV2\n net_type = resnet_32_cifar10.ResNet\n # net_type=dense_net_cifar10.DenseNet\n # net_type=resnet_34_IN_style_cifar.ResNet\n\n # net_type=efficient_net_cifar100.EfficientNet\n # net_type=mobile_net_v2_cifar100.MobileNetV2\n # net_type=resnet_32_cifar100.ResNet\n # net_type=dense_net_cifar100.DenseNet\n\n # net_type=efficient_net_IM.EfficientNet\n # net_type=mobile_net_v2_IM.MobileNetV2\n # net_type=resnet_101_IM.ResNet\n # net_type=resnet_50_IM.ResNet\n # net_type=dense_net_IM.DenseNet\n\n # data_set_loader = ImageNetLoader\n #data_set_loader = Cifar10Loader # also uncomment is_augment\n #data_set_loader.is_augment = True\n data_set_loader = Cifar100Loader\n # data_set_loader = TolstoiLoader\n # data_set_loader= MNISTLoader\n\n sys.stdout.flush()\n\n if FLAGS.optimizer == \"OL\":\n net = net_frame_ol.NetFrame(net_type, data_set_loader, optimizer, FLAGS.num_gpus, FLAGS.random_seed,\n FLAGS.train_data_size,\n FLAGS.batch_size, FLAGS.dataset_path, workpath, FLAGS.experiment_name,\n is_calc_angle=False)\n else:\n net = net_frame.NetFrame(net_type, data_set_loader, optimizer, FLAGS.num_gpus, FLAGS.random_seed,\n FLAGS.train_data_size,\n FLAGS.batch_size, FLAGS.dataset_path, workpath, FLAGS.experiment_name,\n is_calc_angle=False) # 100. 
0.001 # problem 1,1 or 20,1 -> very steep descent!\n\n is_failed = False\n try:\n mean_train_losses_per_interval, evaluation_accuracies, train_losses_for_each_step, step_sizes_for_each_step, \\\n angles_for_each_step, grad_norms_for_each_step, train_time_for_each_step, tran_acc_per_interval, \\\n eval_losses, avg_test_acc, avg_test_loss, all_first_derivatives, all_second_derivatives \\\n = net.train(FLAGS.train_steps)\n except Exception as e:\n print(e.__doc__)\n is_failed = True\n print(\"FAILED\")\n\n if is_failed:\n eval_data_wrapper = fu.EvalDataWrapper(FLAGS.experiment_name, FLAGS.random_seed, FLAGS.optimizer,\n FLAGS.train_data_size,\n FLAGS.train_steps, FLAGS.batch_size, FLAGS.measuring_step_size,\n FLAGS.momentum,\n FLAGS.loose_approximation_factor, FLAGS.max_stepsize, FLAGS.decay,\n FLAGS.additional, [], [], [], [], None, None, is_failed)\n else:\n eval_data_wrapper = fu.EvalDataWrapper(FLAGS.experiment_name, FLAGS.random_seed, FLAGS.optimizer,\n FLAGS.train_data_size,\n FLAGS.train_steps, FLAGS.batch_size, FLAGS.measuring_step_size,\n FLAGS.momentum,\n FLAGS.loose_approximation_factor, FLAGS.max_stepsize, FLAGS.decay,\n FLAGS.additional, mean_train_losses_per_interval, tran_acc_per_interval,\n evaluation_accuracies, eval_losses, avg_test_acc, avg_test_loss, is_failed,\n angles_for_each_step, step_sizes_for_each_step, grad_norms_for_each_step,\n all_first_derivatives, all_second_derivatives\n )\n\n fu.save_eval_data_wrapper(eval_data_wrapper, net.model_dir)",
"def train_loop(job_name,\n agent,\n save_dir,\n seed = 0,\n niter = 101,\n gamma = 0.995,\n gae_lambda = None,\n num_cpu = 1,\n sample_mode = 'trajectories',\n num_samples = None,\n save_freq = 10,\n evaluation_rollouts = None,\n plot_keys = ['stoc_pol_mean']):\n # Validate parameters.\n if not os.path.isdir(save_dir):\n raise ValueError('Save directory {} does not exist'.format(save_dir))\n if sample_mode not in ['trajectories', 'samples']:\n raise ValueError('Invalid sample mode: {}'.format(sample_mode))\n\n # Choose a default for num_samples if not specified.\n if num_samples is None:\n num_samples = 50 if sample_mode == 'trajectories' else 50000\n\n # Initialize the folders in the save directory.\n iterations_dir = os.path.join(save_dir, 'iterations')\n if not os.path.isdir(iterations_dir):\n os.mkdir(iterations_dir)\n logs_dir = os.path.join(save_dir, 'logs')\n if agent.save_logs and not os.path.isdir(logs_dir):\n os.mkdir(logs_dir)\n\n # Initialize results log file.\n results_path = os.path.join(save_dir, 'results.txt')\n open(results_path, 'w').close()\n\n # Initialize training variables.\n np.random.seed(seed)\n best_policy = copy.deepcopy(agent.policy)\n best_perf = -1e8\n train_curve = best_perf * np.ones(niter)\n mean_pol_perf = 0.0\n\n # Prefix tensorboard logs with the job name.\n # tb_logger = tensorboard.get_prefixed(job_name)\n tb_logger = []\n # print('Starting training for job: {}'.format(job_name))\n\n for i in range(niter):\n print('.' * 80 + '\\nITERATION : {}'.format(i))\n\n if train_curve[i-1] > best_perf:\n best_policy = copy.deepcopy(agent.policy)\n best_perf = train_curve[i-1]\n\n stats = agent.train_step(\n N=num_samples,\n sample_mode=sample_mode,\n gamma=gamma,\n gae_lambda=gae_lambda,\n num_cpu=num_cpu,\n )\n train_curve[i] = stats[0]\n\n if evaluation_rollouts is not None and evaluation_rollouts > 0:\n print('Performing evaluation rollouts ........')\n mean_pol_perf = _evaluation_rollout(agent, evaluation_rollouts, num_cpu)\n if agent.save_logs:\n agent.logger.log_kv('eval_score', mean_pol_perf)\n\n if i % save_freq == 0 and i > 0:\n _save_policy(agent.policy, 'policy_{}'.format(i), iterations_dir)\n _save_policy(agent.baseline, 'baseline_{}'.format(i), iterations_dir)\n _save_policy(best_policy, 'best_policy', iterations_dir)\n if agent.save_logs:\n agent.logger.save_log(logs_dir)\n make_train_plots(log=agent.logger.log, keys=plot_keys, save_loc=logs_dir)\n\n _log_performance(i, train_curve[i], mean_pol_perf, best_perf,\n results_path, tb_logger)\n if agent.save_logs:\n print_data = sorted(filter(lambda v: np.asarray(v[1]).size == 1,\n agent.logger.get_current_log().items()))\n print(tabulate(print_data))\n\n # Save the final best policy.\n _save_policy(best_policy, 'best_policy', iterations_dir)\n if agent.save_logs:\n agent.logger.save_log(logs_dir)\n make_train_plots(log=agent.logger.log, keys=plot_keys, save_loc=logs_dir)",
"def train(self, verbose=True):\n\n\n learned = False\n iteration = 0\n\n from util.loss_functions import DifferentError\n loss = DifferentError()\n\n\n\n\n\n # Train for some epochs if the error is not 0\n while not learned:\n # x ist ein Bild bestehend aus einem Label (erster Eintrag) und 784 Pixeln\n # t ist das Zielergebnis von x (überprüfbar mit dem Label)\n # o ist der tatsächliche Ergebnis von x\n # w ist der Gewichtsvektor\n # Als Aktivierungsfunktion verwenden wir die Sigmoid Funktion\n # Das Training wird dann beendet, sobald das Fehlerkriterium konvergiert\n\n totalError = 0\n\n output = []\n labels = self.trainingSet.label\n inputs = self.trainingSet.input\n\n # iteriere für jede Instanz im Trainingsset x € X\n for input in inputs:\n # Ermittle O_x = sig(w*x)\n output.append(self.fire(input))\n\n # Ermittle Fehler AE = tx - ox\n error = loss.calculateError(np.array(labels), np.array(output))\n\n # grad = [0]\n grad = np.zeros(len(self.trainingSet.input[0]))\n grad2 = np.zeros(len(self.trainingSet.input[0]))\n\n for e, input, out in zip(error, inputs, output):\n activationPrime = Activation.getDerivative(activationName)(np.dot(np.array(input), self.weight))\n #grad += np.multiply( np.multiply( input, e), activationPrime)\n grad += np.multiply( input, e)\n\n # Update grad = grad + errorPrime * x * activationPrime\n\n\n\n # print grad - grad2\n #print \"Error: \" + str(error) + \" Grad: \" + str(grad)\n\n # update w: w <- w + n*grad\n self.updateWeights(grad)\n\n\n iteration += 1\n totalError = error.sum()\n\n if verbose:\n logging.info(\"Epoch: %i; Error: %i\", iteration, totalError)\n\n if abs(totalError) < 0.01 or iteration >= self.epochs:\n # stop criteria is reached\n learned = True\n\n pass",
"def main():\n parser = argparse.ArgumentParser(description='Implementation of the Naive Bayes and Perceptron classifiers')\n parser.add_argument('--statsmode', help='whether to gather stats or not', choices=['y','Y','N','n'], default='n')\n parser.add_argument('--classifier', help='classifier to use', choices=['BAYES', 'PERCEPTRON'], required=True)\n parser.add_argument('--mode', help='image class to test', choices=['VALIDATION', 'TEST'], default='TEST')\n parser.add_argument('--type', help='image type to train', choices=['DIGIT', 'FACE', 'MNIST'], required=True)\n parser.add_argument('--range', metavar=('START', 'END_EXCLUSIVE'), nargs=2, type=int, help='Range of data to test', default=[0, 100])\n parser.add_argument('--trainpercent', metavar='PERCENT', type=int, help='the percent of training data to use (int out of 100)', default=100, dest='percentage')\n parser.add_argument('--smoothing', type=int, help='Laplace smoothing constant (Naive Bayes)', default=2)\n parser.add_argument('--iterations', type=int, help='Number of times to iterate over training data (Perceptron)', default=5)\n parser.add_argument('--debug', help='Outputs more detailed information to stdout', action='store_true')\n parser.add_argument('--statloops', type=int, help='Number of times the classifier iterates over test data (Statistics only)', default=5)\n args = parser.parse_args()\n # image_type = ImageType.DIGIT if args.type == 'DIGIT' else ImageType.FACE\n image_type = None\n if args.type == 'DIGIT':\n image_type = ImageType.DIGIT\n elif args.type == 'FACE':\n image_type = ImageType.FACE\n else:\n image_type = ImageType.MNIST\n mode = Mode.TEST if args.mode == 'TEST' else Mode.VALIDATION\n if args.statsmode == 'y' or args.statsmode == 'Y':\n run_percentages_classifier(args.classifier, image_type, args)\n else:\n run = run_classifier_bayes if args.classifier == 'BAYES' else run_classifier_perceptron\n run(mode, image_type, args)",
"def main(tetrode_number=TETRODE_NUMBER,num_hidden_units=500,num_hidden_units_2=300,num_hidden_units_3=200,num_code_units=50):\n \n print(\"Making the model...\")\n network = model((None,200),200,num_hidden_units,num_hidden_units_2,num_hidden_units_3,num_code_units)\n print(\"Done!\")\n\n\n for tetrode_number in [10]:\n\n print(\"Loading the model parameters from {}\".format(MODEL_FILENAME+str(tetrode_number)))\n f = open(MODEL_FILENAME+str(tetrode_number),'r')\n all_param_values = pickle.load(f)\n f.close()\n # print(all_param_values)\n lasagne.layers.set_all_param_values(network, all_param_values)\n\n print(\"Loading the data...\")\n dataset = load_data(tetrode_number)\n print(\"Done!\")\n\n print(dataset['data'].shape)\n\n print(\"Setting up the training functions...\")\n training = funcs(dataset,network)\n print(\"Done!\")\n\n for i in range(NUM_EPOCHS):\n costs = []\n\n for start, end in zip(range(0, dataset['data'].shape[0], BATCH_SIZE), range(BATCH_SIZE, dataset['data'].shape[0], BATCH_SIZE)):\n cost = training['train'](dataset['data'][start:end],dataset['data'][start:end])\n costs.append(cost)\n\n meanTrainCost = np.mean(np.asarray(costs,dtype=np.float32))\n # accuracy = training['accuracy'](dataset['X_test'],dataset['y_test'])\n\n print(\"Epoch: {}, Training cost: {}\".format(i+1,meanTrainCost))\n # NUM_POINTS = 5000\n codes = training['code'](dataset['data'][0:NUM_POINTS])\n\n \n\n # y = set(list(d.predict(dataset['data'][0:NUM_POINTS])))\n\n # print(y)\n\n # activations_1 = training['activations_1'](dataset['data'][0:NUM_POINTS])\n # activations_2 = training['activations_2'](dataset['data'][0:NUM_POINTS])\n # codes = training['code'](dataset['data'][0:NUM_POINTS])\n # # print(codes.shape)\n # # codes_2d = bh_sne(codes)\n\n # for k in range(3):\n # print(k)\n\n # codes_2d = bh_sne(np.asarray(codes[:(k+1)*12000],dtype=np.float64))\n\n # # d = DPGMM(n_components=10, covariance_type='full')\n # d = DPGMM(n_components=15,n_iter=100)\n\n # d.fit(codes_2d[:(k+1)*12000])\n\n # hdp = d.predict_proba(codes_2d[:(k+1)*12000])\n\n # hdp_1d = [np.argmax(z) for z in hdp]\n\n # print(set(list(hdp_1d)))\n\n # plt.scatter(codes_2d[:, 0], codes_2d[:, 1], c=hdp_1d, alpha=0.8,lw=0)\n # plt.savefig('dbscan_labels/deep/sparse/hdp_{}_{}.png'.format(tetrode_number,k), bbox_inches='tight')\n # plt.close()\n\n # # m = TSNE(n_components=2, random_state=0)\n \n # # codes_2d = m.fit_transform(codes[:NUM_POINTS])\n # # activations_1_2d = bh_sne(activations_1)\n # # activations_2_2d = bh_sne(activations_2)\n\n # plt.scatter(codes_2d[:, 0], codes_2d[:, 1], c=dataset['labels'][0:NUM_POINTS][:(k+1)*12000],alpha=0.8,lw=0)\n # plt.savefig('dbscan_labels/deep/sparse/tsne_codes_{}_{}.png'.format(tetrode_number,k), bbox_inches='tight')\n # plt.close()\n\n # # This is where the code for the video will go\n # ##############################################################################\n # # Compute DBSCAN\n # db = None\n # core_samples_mask = None\n # labels = None\n\n # num_labels = 0\n # eps=1.0\n # while(num_labels < 10):\n # db = DBSCAN(eps=eps, min_samples=10).fit(codes_2d)\n # core_samples_mask = np.zeros_like(db.labels_, dtype=bool)\n # core_samples_mask[db.core_sample_indices_] = True\n # labels = db.labels_\n # num_labels = np.amax(labels)\n # eps -= 0.1\n\n # print(\"Num learned labels: {}\".format(num_labels))\n\n # plt.title('Estimated number of clusters: {}'.format(np.amax(labels)))\n # plt.scatter(codes_2d[:, 0], codes_2d[:, 1], c=labels[0:NUM_POINTS][:(k+1)*12000],lw=0)\n # 
plt.savefig('dbscan_labels/deep/sparse/dbscan_codes_{}_{}.png'.format(tetrode_number,k), bbox_inches='tight')\n # plt.close()\n\n # # f=open('dbscan_labels/deep/sparse/tetrode_{}.npy'.format(tetrode_number),'w')\n # # pickle.dump(labels, f)\n # # f.close()\n\n codes_2d = bh_sne(np.asarray(codes,dtype=np.float64),theta=0.4)\n\n # d = DPGMM(n_components=10, covariance_type='full')\n d = DPGMM(n_components=15,n_iter=1000)\n\n d.fit(codes_2d)\n\n hdp = d.predict_proba(codes_2d)\n\n hdp_1d = [np.argmax(z) for z in hdp]\n\n print(set(list(hdp_1d)))\n\n plt.scatter(codes_2d[:, 0], codes_2d[:, 1], c=hdp_1d, alpha=0.8,lw=0)\n plt.savefig('dbscan_labels/deep/sparse/hdp_{}.png'.format(tetrode_number), bbox_inches='tight')\n plt.close()\n\n # m = TSNE(n_components=2, random_state=0)\n \n # codes_2d = m.fit_transform(codes[:NUM_POINTS])\n # activations_1_2d = bh_sne(activations_1)\n # activations_2_2d = bh_sne(activations_2)\n\n plt.scatter(codes_2d[:, 0], codes_2d[:, 1], c=dataset['labels'][0:NUM_POINTS],alpha=0.8,lw=0)\n plt.savefig('dbscan_labels/deep/sparse/tsne_codes_{}.png'.format(tetrode_number), bbox_inches='tight')\n plt.close()\n\n # This is where the code for the video will go\n ##############################################################################\n # Compute DBSCAN\n db = None\n core_samples_mask = None\n labels = None\n\n num_labels = 0\n eps=1.0\n while(num_labels < 10):\n db = DBSCAN(eps=eps, min_samples=10).fit(codes_2d)\n core_samples_mask = np.zeros_like(db.labels_, dtype=bool)\n core_samples_mask[db.core_sample_indices_] = True\n labels = db.labels_\n num_labels = np.amax(labels)\n eps -= 0.1\n\n print(\"Num learned labels: {}\".format(num_labels))\n\n plt.title('Estimated number of clusters: {}'.format(np.amax(labels)))\n plt.scatter(codes_2d[:, 0], codes_2d[:, 1], c=labels[0:NUM_POINTS],lw=0)\n plt.savefig('dbscan_labels/deep/sparse/dbscan_codes_{}.png'.format(tetrode_number), bbox_inches='tight')\n plt.close()\n\n # f=open('dbscan_labels/deep/sparse/tetrode_{}.npy'.format(tetrode_number),'w')\n # pickle.dump(labels, f)\n # f.close()",
"def train_all(self):\n for p in self.parameters():\n p.requires_grad = True\n return self",
"def train_and_eval(params: flags.FlagValues) -> tf.keras.callbacks.History:\n logging.info('Run training for {} with {}'.format(params.model_name,\n params.dataset_name))\n logging.info('The CLI params are: {}'.format(params.flag_values_dict()))\n d_config = _get_dataset_config().get(params.dataset_name)()\n m_config = _get_model_config().get(params.model_name)()\n\n logging.info('Training dataset configuration:', d_config)\n logging.info('Training model configuration:', m_config)\n\n # override the model params with CLI params\n m_config.num_classes = d_config.num_classes\n m_config.dropout_keep_prob = 1 - params.dropout_rate\n m_config.weight_decay = params.std_weight_decay\n m_config.stddev = params.truncated_normal_stddev\n m_config.batch_norm_decay = params.batch_norm_decay\n\n strategy = tf.distribute.MirroredStrategy()\n with strategy.scope():\n # override the dataset params with CLI params\n if params.data_dir:\n d_config.data_dir = params.data_dir\n global_batch_size = params.batch_size * strategy.num_replicas_in_sync\n\n # override the dataset params with CLI params\n # for distributed training, update batch size\n d_config.batch_size = global_batch_size\n # determine whether one_hot is used based on label_smoothing\n d_config.one_hot = params.label_smoothing and params.label_smoothing > 0\n\n # build train dataset\n train_dataset = get_dataset(d_config)\n # build validation dataset\n d_config.split = 'validation'\n eval_dataset = get_dataset(d_config)\n\n # compute number iterations per epoch\n steps_per_epoch = d_config.num_examples // d_config.batch_size\n eval_steps = d_config.num_eval_examples // d_config.batch_size\n\n # build the model\n keras_model = build_model(\n model_name=params.model_name,\n dataset_config=d_config,\n model_config=m_config\n )\n\n # build the optimizer\n learning_params = defaults.LR_CONFIG_DEFAULT\n learning_params.update({'initial_lr': params.lr,\n 'decay_epochs': params.lr_decay_epochs,\n 'decay_rate': params.lr_decay_rate})\n optimizer_params = defaults.OP_CONFIG_DEFAULT\n optimizer_params.update({'decay': params.op_decay_rate,\n 'momentum': params.op_momentum})\n optimizer = _get_optimizer(\n batch_size=global_batch_size,\n steps_per_epoch=steps_per_epoch,\n lr_name=params.learning_scheduler_name,\n optimizer_name=params.optimizer_name,\n lr_params=learning_params,\n optimizer_params=optimizer_params\n )\n\n logging.info('Exponential decay rate:{}'.format(params.ma_decay_rate))\n if params.ma_decay_rate:\n optimizer = tfa.optimizers.MovingAverage(\n optimizer=optimizer,\n average_decay=params.ma_decay_rate)\n\n # compile model\n if d_config.one_hot:\n loss_obj = tf.keras.losses.CategoricalCrossentropy(\n label_smoothing=params.label_smoothing)\n else:\n loss_obj = tf.keras.losses.SparseCategoricalCrossentropy()\n\n keras_model.compile(\n optimizer=optimizer,\n loss=loss_obj,\n metrics=[_get_metrics(one_hot=d_config.one_hot)['acc']],\n )\n\n logging.info(keras_model.summary())\n\n initial_epoch = 0\n if params.resume_checkpoint:\n initial_epoch = _resume_from_checkpoint(model=keras_model,\n model_dir=params.model_dir,\n train_steps=steps_per_epoch)\n\n # Callbacks\n callbacks_to_use = _get_callback(model_dir=params.model_dir)\n\n # Train model\n history = keras_model.fit(\n train_dataset,\n steps_per_epoch=steps_per_epoch,\n epochs=params.epochs,\n validation_data=eval_dataset,\n validation_steps=eval_steps,\n initial_epoch=initial_epoch,\n verbose=1,\n callbacks=callbacks_to_use\n )\n\n return history",
"def train():\n\t# 1、make dataloader\n\ttrain_loader, val_loader, num_query, num_class = make_data_loader(cfg)\n\t#print(\"num_query:{},num_class:{}\".format(num_query,num_class))\n\n\t# 2、make model\n\tmodel = build_model(cfg, num_class)\n\n\t# model.eval()\n\t# x = model(img_tensor)\n\t# print(x.shape)\n\t# 3、 make optimizer\n\toptimizer = make_optimizer(cfg, model)\n\n\t# 4、 make lr_scheduler\n\tscheduler = make_lr_scheduler(cfg, optimizer)\n\n\t# 5、 make loss_func\n\tif cfg.MODEL.PCB_NECK:\n\t\t# make loss specificially for pcb \n\t\tloss_func = get_softmax_triplet_loss_fn(cfg, num_class)\n\telse:\n\t\tloss_func = make_loss(cfg, num_class)\n\n\t# get paramters\n\tlog_period = cfg.OUTPUT.LOG_PERIOD \n\tckpt_period =cfg.OUTPUT.CHECKPOINT_PERIOD\n\teval_period = cfg.OUTPUT.EVAL_PERIOD\n\toutput_dir = cfg.OUTPUT.ROOT_DIR\n\tdevice = cfg.MODEL.DEVICE\n\tepochs = cfg.SOLVER.MAX_EPOCHS\n\tuse_gpu = device == \"cuda\"\n\tuse_neck = cfg.MODEL.NECK or cfg.MODEL.LEARN_REGION \n\t# how many batch for each log\n\tbatch_size = cfg.SOLVER.IMGS_PER_BATCH\n\tbatch_num = len(train_loader) \n\t\n\tlog_iters = batch_num // log_period\n\tpretrained = cfg.MODEL.PRETRAIN_PATH != ''\n\tparallel = cfg.MODEL.PARALLEL \t\n\tgrad_clip = cfg.DARTS.GRAD_CLIP \n\n\tfeat_norm = cfg.TEST.FEAT_NORM \n\tckpt_save_path = cfg.OUTPUT.ROOT_DIR + cfg.OUTPUT.CKPT_DIR\n\tif not os.path.exists(ckpt_save_path):\n\t\tos.makedirs(ckpt_save_path)\n\n\n\t# create *_result.xlsx\n\t# save the result for analyze\n\tname = (cfg.OUTPUT.LOG_NAME).split(\".\")[0] + \".xlsx\"\n\tresult_path = cfg.OUTPUT.ROOT_DIR + name\n\n\twb = xl.Workbook()\n\tsheet = wb.worksheets[0]\n\ttitles = ['size/M','speed/ms','final_planes', 'acc', 'mAP', 'r1', 'r5', 'r10', 'loss',\n\t\t\t 'acc', 'mAP', 'r1', 'r5', 'r10', 'loss','acc', 'mAP', 'r1', 'r5', 'r10', 'loss']\n\tsheet.append(titles)\n\tcheck_epochs = [40, 80, 120, 160, 200, 240, 280, 320, 360, epochs]\n\tvalues = []\n\n\tlogger = logging.getLogger('MobileNetReID.train')\n\t\n\t# count parameter\n\tsize = count_parameters(model)\n\tlogger.info(\"the param number of the model is {:.2f} M\".format(size))\n\t\n\tvalues.append(format(size, '.2f'))\n\tvalues.append(model.final_planes)\n\n\tlogger.info(\"Start training\")\n\t\n\t#count = 183, x, y = batch -> 11712 for train\n\tif pretrained:\n\t\tstart_epoch = model.start_epoch\n\n\tif parallel:\n\t\tmodel = nn.DataParallel(model)\n\n\tif use_gpu:\n\t\t# model = nn.DataParallel(model)\n\t\tmodel.to(device)\n\t\n\t# save the best model\n\tbest_mAP, best_r1 = 0., 0.\n\tis_best = False\n\t# batch : img, pid, camid, img_path\n\tavg_loss, avg_acc = RunningAverageMeter(), RunningAverageMeter()\n\tavg_time, global_avg_time = AverageMeter(), AverageMeter()\n\tglobal_avg_time.reset()\n\tfor epoch in range(epochs):\n\t\tscheduler.step()\n\n\t\tif pretrained and epoch < start_epoch - 1:\n\t\t\tcontinue\n\t\n\t\tmodel.train()\n\t\t# sum_loss, sum_acc = 0., 0.\n\t\tavg_loss.reset()\n\t\tavg_acc.reset()\n\t\tavg_time.reset()\n\t\tfor i, batch in enumerate(train_loader):\n\n\t\t\tt0 = time.time()\n\t\t\timgs,labels = batch\n\n\t\t\tif use_gpu:\n\t\t\t\timgs = imgs.to(device)\n\t\t\t\tlabels = labels.to(device)\n\n\t\t\tres = model(imgs)\n\t\t\t# score, feat = model(imgs)\n\t\t\t# loss = loss_func(score, feat, labels)\n\t\t\tloss, acc = compute_loss_acc(use_neck, res, labels, loss_func)\n\t\t\t\n\t\t\tloss.backward()\n\t\t\tif grad_clip != 0:\n\t\t\t\tnn.utils.clip_grad_norm(model.parameters(), grad_clip)\n\n\t\t\toptimizer.step()\n\n\t\t\toptimizer.zero_grad()\n\n\t\t\t# acc 
= (score.max(1)[1] == labels).float().mean()\n\n\t\t\t# sum_loss += loss\n\t\t\t# sum_acc += acc \n\t\t\tt1 = time.time()\n\t\t\tavg_time.update((t1 - t0) / batch_size)\n\t\t\tavg_loss.update(loss)\n\t\t\tavg_acc.update(acc)\n\n\t\t\t#log the info \n\t\t\tif (i+1) % log_iters == 0:\n\n\t\t\t\tlogger.info(\"epoch {}: {}/{} with loss is {:.5f} and acc is {:.3f}\".format(\n\t\t\t\t\t epoch+1, i+1, batch_num, avg_loss.avg, avg_acc.avg))\n\n\t\tlr = optimizer.state_dict()['param_groups'][0]['lr']\n\t\tlogger.info(\"end epochs {}/{} with lr: {:.5f} and avg_time is {:.3f} ms\".format(epoch+1, epochs, lr, avg_time.avg * 1000))\n\t\tglobal_avg_time.update(avg_time.avg)\n\t\t# change the lr \n\n\t\t# eval the model \n\t\tif (epoch+1) % eval_period == 0 or (epoch + 1) == epochs :\n\t\t\t\n\t\t\tmodel.eval()\n\t\t\tmetrics = R1_mAP(num_query, use_gpu = use_gpu, feat_norm = feat_norm)\n\n\t\t\twith torch.no_grad():\n\n\t\t\t\tfor vi, batch in enumerate(val_loader):\n\t\t\t\t\t\n\t\t\t\t\timgs, labels, camids = batch\n\n\t\t\t\t\tif use_gpu:\n\t\t\t\t\t\timgs = imgs.to(device)\n\n\t\t\t\t\tfeats = model(imgs)\n\t\t\t\t\tmetrics.update((feats,labels, camids))\n\n\t\t\t\t#compute cmc and mAP\n\t\t\t\tcmc, mAP = metrics.compute()\n\t\t\t\tlogger.info(\"validation results at epoch:{}\".format(epoch + 1))\n\t\t\t\tlogger.info(\"mAP:{:.2%}\".format(mAP))\n\t\t\t\tfor r in [1,5,10]:\n\t\t\t\t\tlogger.info(\"CMC curve, Rank-{:<3}:{:.2%}\".format(r,cmc[r-1]))\t\n\n\t\t\t\t# determine whether cur model is the best \n\t\t\t\tif mAP > best_mAP:\n\t\t\t\t\tis_best = True\n\t\t\t\t\tbest_mAP = mAP\n\t\t\t\t\tlogger.info(\"Get a new best mAP\")\n\t\t\t\tif cmc[0] > best_r1:\n\t\t\t\t\tis_best = True\n\t\t\t\t\tbest_r1 = cmc[0]\n\t\t\t\t\tlogger.info(\"Get a new best r1\")\n\n\t\t\t\t# add the result to sheet\n\t\t\t\tif (epoch + 1) in check_epochs:\n\t\t\t\t\tval = [avg_acc.avg, mAP, cmc[0], cmc[4], cmc[9]]\n\t\t\t\t\tchange = [format(v * 100, '.2f') for v in val]\n\t\t\t\t\tchange.append(format(avg_loss.avg, '.3f'))\n\t\t\t\t\tvalues.extend(change)\n\n\n\t\t# we hope that eval_period == ckpt_period or eval_period == k* ckpt_period where k is int\t\t\t\n\t\t# whether to save the model\n\t\tif (epoch+1) % ckpt_period == 0 or is_best:\n\n\t\t\tif parallel:\n\t\t\t\ttorch.save(model.module.state_dict(), ckpt_save_path + \"checkpoint_{}.pth\".format(epoch + 1 ))\n\t\t\telse:\n\t\t\t\ttorch.save(model.state_dict(), ckpt_save_path + \"checkpoint_{}.pth\".format(epoch + 1 ))\n\n\t\t\tlogger.info(\"checkpoint {} saved !\".format(epoch + 1))\n\n\t\t\tif is_best:\n\t\t\t\tif parallel:\n\t\t\t\t\ttorch.save(model.module.state_dict(), ckpt_save_path + \"best_ckpt.pth\")\n\t\t\t\telse:\n\t\t\t\t\ttorch.save(model.state_dict(), ckpt_save_path + \"best_ckpt.pth\")\n\t\t\t\tlogger.info(\"best checkpoint was saved\")\n\t\t\t\tis_best = False\n\t\n\tvalues.insert(1, format(global_avg_time.avg * 1000, '.2f'))\n\tsheet.append(values)\n\twb.save(result_path)\n\n\tlogger.info(\"training is end, time for per imgs is {} ms\".format(global_avg_time.avg *1000))",
"def train_model(self):\r\n alpha, accuracy_rate = self.select_model()\r\n # Initialize logistic regression with alpha(learning rate)\r\n lg = logisticregression(C=alpha)\r\n # Train the model.\r\n lg.fit(self.training_data, self.target_data)\r\n # Save the trained model as .pkl file.\r\n joblib.dump(value=lg, filename=self.intention_id+'.pkl', compress=1)\r\n print \"Estimated Parameters of Logistic Regression\"\r\n # Estimated parameters of logistic regression.\r\n print lg.get_params()",
"def main(model_type, hyperparameter=None, data_version=\"version_6\", evaluation=False, first_loc=1, end_loc=7):\n # assign parameter save and load path\n parameter_path = f\"Model_parameters/{data_version}\"\n print(f'data path: {parameter_path}')\n\n if model_type == \"Regressor\":\n # preprocess for MLP preceptron\n X_train, y_train, X_del, y_del, X_test, y_test = \\\n pre_processing.merge_split(data_version=data_version, first_loc=first_loc, end_loc=end_loc)\n\n # training MLP preceptron\n regressor = Regressor.regression(X_train, y_train, X_test, y_test,\n hyperparameter=hyperparameter, version=data_version)\n\n # save model and prediction result\n Save_model.save_Preceptron(regressor, X_test, y_test, path=parameter_path, overwrite=True)\n\n # evaluate fitting process\n if evaluation:\n plot_learning_curve.evaluation_learning_curve(regressor, X_train, y_train,\n title=f\"{regressor.get_params()['hidden_layer_sizes']}\")\n\n elif model_type == \"Classifier\":\n # preprocess for MLP preceptron\n X_train, y_train, X_del, y_del, X_test, y_test = \\\n pre_processing.merge_split(data_version=data_version, first_loc=first_loc, end_loc=end_loc, regressor=False)\n\n # training MLP preceptron\n classifier = Classifier.classifier(X_train, y_train, X_test, y_test, hyperparameter=hyperparameter)\n\n # save model and prediction result\n Save_model.save_Preceptron(classifier, X_test, y_test, path=parameter_path)\n\n # evaluate MLP classifier\n if evaluation:\n confusion_matrix.confusion_matrix(classifier, X_test, y_test, target_name=None)",
"def train(self, inputs, targets, validation_data, num_epochs, regularizer_type=None):\n for k in xrange(num_epochs):\n loss = 0\n # Forward pass\n a1, probs = self._feed_forward(inputs)\n \n # Backpropagation\n dWxh, dWhy, dbh, dby = self._back_propagation(inputs, targets, a1, probs,len(inputs))\n\n # Perform the parameter update with gradient descent\n self.Wxh += -self.learning_rate * dWxh\n self.bh += -self.learning_rate * dbh\n self.Why += -self.learning_rate * dWhy\n self.by += -self.learning_rate * dby \n \n\n # validation using the validation data\n\n validation_inputs = validation_data[0]\n validation_targets = validation_data[1]\n\n print 'Validation'\n\n # Forward pass\n a1, probs = self._feed_forward(validation_inputs)\n\n # Backpropagation\n dWxh, dWhy, dbh, dby = self._back_propagation(validation_inputs, validation_targets, a1, probs,len(validation_inputs))\n\n if regularizer_type == 'L2':\n dWhy = self.reg_lambda * self.Why\n dWxh = self.reg_lambda * self.Wxh\n\n # Perform the parameter update with gradient descent\n self.Wxh += -self.learning_rate * dWxh\n self.bh += -self.learning_rate * dbh\n self.Why += -self.learning_rate * dWhy\n self.by += -self.learning_rate * dby \n\n if k%1 == 0:\n print \"Epoch \" + str(k) + \" : Loss = \" + str(self._calc_smooth_loss(loss, len(inputs), regularizer_type))\n\n #self.save('models.pkl')",
"def __init__(self, in_features, out_features):\n \n ########################\n # PUT YOUR CODE HERE #\n #######################\n\n\n self.params = {'weight': 0.0001 * np.random.randn(out_features, in_features), 'bias': np.zeros((out_features, 1))}\n self.grads = {'weight': np.zeros((out_features, in_features)), 'bias': np.zeros((out_features, 1))}\n\n\n\n ########################\n # END OF YOUR CODE #\n #######################",
"def model(X, Y, word_to_vec_map, learning_rate = 0.01, num_iterations = 400):\n \n # Get a valid word contained in the word_to_vec_map \n any_word = list(word_to_vec_map.keys())[0]\n \n # Initialize cost. It is needed during grading\n cost = 0\n \n # Define number of training examples\n m = Y.shape[0] # number of training examples\n n_y = len(np.unique(Y)) # number of classes \n n_h = word_to_vec_map[any_word].shape[0] # dimensions of the GloVe vectors \n \n # Initialize parameters using Xavier initialization\n W = np.random.randn(n_y, n_h) / np.sqrt(n_h)\n b = np.zeros((n_y,))\n \n # Convert Y to Y_onehot with n_y classes\n Y_oh = convert_to_one_hot(Y, C = n_y) \n \n # Optimization loop\n for t in range(num_iterations): # Loop over the number of iterations\n for i in range(m): # Loop over the training examples\n \n ### START CODE HERE ### (≈ 4 lines of code)\n # Average the word vectors of the words from the i'th training example\n # def sentence_to_avg(sentence, word_to_vec_map): # return avg\n avg = sentence_to_avg(X[i], word_to_vec_map)\n\n # Forward propagate the avg through the softmax layer. \n # You can use np.dot() to perform the multiplication.\n z = np.dot(W, avg) + b\n a = softmax(z)\n\n # Compute cost using the i'th training label's one hot representation and \"A\" (the output of the softmax)\n cost = - np.sum(Y_oh[i] * a)\n ### END CODE HERE ###\n \n # Compute gradients \n dz = a - Y_oh[i]\n dW = np.dot(dz.reshape(n_y,1), avg.reshape(1, n_h))\n db = dz\n\n # Update parameters with Stochastic Gradient Descent\n W = W - learning_rate * dW\n b = b - learning_rate * db\n \n if t % 100 == 0:\n print(\"Epoch: \" + str(t) + \" --- cost = \" + str(cost))\n pred = predict(X, Y, W, b, word_to_vec_map) #predict is defined in emo_utils.py\n\n return pred, W, b",
"def train_classifier(train_data, dev_data, num_iterations, learning_rate, params):\n\n for I in xrange(num_iterations):\n cum_loss = 0.0 # total loss in this iteration.\n random.shuffle(train_data)\n for label, features in train_data:\n x = feats_to_vec(features) # convert features to a vector.\n y = utils.L2I[label] # convert the label to number if needed.\n loss, grads = ll.loss_and_gradients(x,y,params)\n cum_loss += loss\n # YOUR CODE HERE\n # update the parameters according to the gradients\n # and the learning rate.\n params[0] -= learning_rate * grads[0]\n params[1] -= learning_rate * grads[1]\n train_loss = cum_loss / len(train_data)\n train_accuracy = accuracy_on_dataset(train_data, params)\n dev_accuracy = accuracy_on_dataset(dev_data, params)\n print I, train_loss, train_accuracy, dev_accuracy\n return params",
"def train(network_def, target_params, optimizer, states, actions, next_states, rewards,\n terminals, loss_weights, cumulative_gamma, target_opt, mse_inf,tau,alpha,clip_value_min, rng):\n online_params = optimizer.target\n def loss_fn(params, rng_input, target, loss_multipliers):\n def q_online(state):\n return network_def.apply(params, state, rng=rng_input)\n\n q_values = jax.vmap(q_online)(states).q_values\n q_values = jnp.squeeze(q_values)\n replay_chosen_q = jax.vmap(lambda x, y: x[y])(q_values, actions)\n \n if mse_inf:\n loss = jax.vmap(mse_loss)(target, replay_chosen_q)\n else:\n loss = jax.vmap(dqn_agent.huber_loss)(target, replay_chosen_q)\n\n mean_loss = jnp.mean(loss_multipliers * loss)\n return mean_loss, loss\n\n rng, rng2, rng3, rng4 = jax.random.split(rng, 4)\n\n def q_target(state):\n return network_def.apply(target_params, state, rng=rng2)\n\n def q_target_online(state):\n return network_def.apply(online_params, state, rng=rng4)\n\n if target_opt == 0:\n target = dqn_agent.target_q(q_target, next_states, rewards, terminals, cumulative_gamma) \n elif target_opt == 1:\n #Double DQN\n target = target_DDQN(q_target_online, q_target, next_states, rewards, terminals, cumulative_gamma)\n\n elif target_opt == 2:\n #Munchausen\n target = target_m_dqn(q_target_online, q_target, states,next_states,actions,rewards,terminals,\n cumulative_gamma,tau,alpha,clip_value_min)\n else:\n print('error')\n\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n (mean_loss, loss), grad = grad_fn(online_params, rng3, target, loss_weights)\n optimizer = optimizer.apply_gradient(grad)\n return optimizer, loss, mean_loss",
"def train_neural_network(session, optimizer, keep_probability, feature_batch, label_batch):\n # TODO: Implement Function \n \n session.run(optimizer,feed_dict={x:feature_batch,y:label_batch, keep_prob:keep_probability})\n \n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(self):\n pass",
"def train(\n *,\n workdir,\n compute_phi,\n compute_psi,\n params,\n optimal_subspace,\n num_epochs,\n learning_rate,\n key,\n method,\n lissa_kappa,\n optimizer,\n covariance_batch_size,\n main_batch_size,\n weight_batch_size,\n d,\n num_tasks,\n compute_feature_norm_on_oracle_states,\n sample_states,\n eval_states,\n use_tabular_gradient = True,\n):\n # Create an explicit weight vector (needed for explicit method only).\n if method == 'explicit':\n key, weight_key = jax.random.split(key)\n explicit_weight_matrix = jax.random.normal(\n weight_key, (d, num_tasks), dtype=jnp.float32\n )\n params['explicit_weight_matrix'] = explicit_weight_matrix\n\n if optimizer == 'sgd':\n optimizer = optax.sgd(learning_rate)\n elif optimizer == 'adam':\n optimizer = optax.adam(learning_rate)\n else:\n raise ValueError(f'Unknown optimizer {optimizer}.')\n optimizer_state = optimizer.init(params)\n\n chkpt_manager = checkpoint.Checkpoint(base_directory=_WORKDIR.value)\n initial_step, params, optimizer_state = chkpt_manager.restore_or_initialize(\n (0, params, optimizer_state)\n )\n\n writer = metric_writers.create_default_writer(\n logdir=str(workdir),\n )\n\n # Checkpointing and logging too much can use a lot of disk space.\n # Therefore, we don't want to checkpoint more than 10 times an experiment,\n # or keep more than 1k Phis per experiment.\n checkpoint_period = max(num_epochs // 10, 100_000)\n log_period = max(1_000, num_epochs // 1_000)\n\n def _checkpoint_callback(step, t, params, optimizer_state):\n del t # Unused.\n chkpt_manager.save((step, params, optimizer_state))\n\n hooks = [\n periodic_actions.PeriodicCallback(\n every_steps=checkpoint_period, callback_fn=_checkpoint_callback\n )\n ]\n\n fixed_train_kwargs = {\n 'compute_phi': compute_phi,\n 'compute_psi': compute_psi,\n 'optimizer': optimizer,\n 'method': method,\n # In the tabular case, the eval_states are all the states.\n 'oracle_states': eval_states,\n 'lissa_kappa': lissa_kappa,\n 'main_batch_size': main_batch_size,\n 'covariance_batch_size': covariance_batch_size,\n 'weight_batch_size': weight_batch_size,\n 'd': d,\n 'num_tasks': num_tasks,\n 'compute_feature_norm_on_oracle_states': (\n compute_feature_norm_on_oracle_states\n ),\n 'sample_states': sample_states,\n 'use_tabular_gradient': use_tabular_gradient,\n }\n variable_kwargs = {\n 'params': params,\n 'optimizer_state': optimizer_state,\n 'key': key,\n }\n\n @jax.jit\n def _eval_step(phi_params):\n eval_phi = compute_phi(phi_params, eval_states)\n eval_psi = compute_psi(eval_states) # pytype: disable=wrong-arg-count\n\n metrics = compute_metrics(eval_phi, optimal_subspace)\n metrics |= {'frob_norm': utils.outer_objective_mc(eval_phi, eval_psi)}\n return metrics\n\n # Perform num_epochs gradient steps.\n with metric_writers.ensure_flushes(writer):\n for step in etqdm.tqdm(\n range(initial_step + 1, num_epochs + 1),\n initial=initial_step,\n total=num_epochs,\n ):\n variable_kwargs = _train_step(**fixed_train_kwargs, **variable_kwargs)\n\n if step % log_period == 0:\n metrics = _eval_step(variable_kwargs['params']['phi_params'])\n writer.write_scalars(step, metrics)\n\n for hook in hooks:\n hook(\n step,\n params=variable_kwargs['params'],\n optimizer_state=variable_kwargs['optimizer_state'],\n )\n\n writer.flush()",
"def _train(self):\n training_environment = self._training_environment\n evaluation_environment = self._evaluation_environment\n policy = self._policy\n pool = self._pool\n\n if not self._training_started:\n self._init_training()\n\n self._initial_exploration_hook(\n training_environment, self._initial_exploration_policy, pool)\n\n self.sampler.initialize(training_environment, policy, pool)\n\n gt.reset_root()\n gt.rename_root('RLAlgorithm')\n gt.set_def_unique(False)\n\n self._training_before_hook()\n\n for self._epoch in gt.timed_for(range(self._epoch, self._n_epochs)):\n self._epoch_before_hook()\n gt.stamp('epoch_before_hook')\n\n start_samples = self.sampler._total_samples\n for i in count():\n samples_now = self.sampler._total_samples\n self._timestep = samples_now - start_samples\n\n if (samples_now >= start_samples + self._epoch_length\n and self.ready_to_train):\n break\n\n self._timestep_before_hook()\n gt.stamp('timestep_before_hook')\n\n self._do_sampling(timestep=self._total_timestep)\n gt.stamp('sample')\n\n if self.ready_to_train:\n self._do_training_repeats(timestep=self._total_timestep)\n gt.stamp('train')\n\n self._timestep_after_hook()\n gt.stamp('timestep_after_hook')\n\n training_paths = self.sampler.get_last_n_paths(math.ceil(self._epoch_length / self.sampler._max_path_length))\n gt.stamp('training_paths')\n evaluation_paths = self._evaluation_paths(policy, evaluation_environment)\n gt.stamp('evaluation_paths')\n\n training_metrics = self._evaluate_rollouts(training_paths, training_environment)\n gt.stamp('training_metrics')\n if evaluation_paths:\n evaluation_metrics = self._evaluate_rollouts(\n evaluation_paths, evaluation_environment)\n gt.stamp('evaluation_metrics')\n else:\n evaluation_metrics = {}\n\n self._epoch_after_hook(training_paths)\n gt.stamp('epoch_after_hook')\n\n sampler_diagnostics = self.sampler.get_diagnostics()\n\n diagnostics = self.get_diagnostics(\n iteration=self._total_timestep,\n batch=self._evaluation_batch(),\n training_paths=training_paths,\n evaluation_paths=evaluation_paths)\n\n time_diagnostics = gt.get_times().stamps.itrs\n\n diagnostics.update(OrderedDict((\n *(\n (f'evaluation/{key}', evaluation_metrics[key])\n for key in sorted(evaluation_metrics.keys())\n ),\n *(\n (f'training/{key}', training_metrics[key])\n for key in sorted(training_metrics.keys())\n ),\n *(\n (f'times/{key}', time_diagnostics[key][-1])\n for key in sorted(time_diagnostics.keys())\n ),\n *(\n (f'sampler/{key}', sampler_diagnostics[key])\n for key in sorted(sampler_diagnostics.keys())\n ),\n ('epoch', self._epoch),\n ('timestep', self._timestep),\n ('timesteps_total', self._total_timestep),\n ('train-steps', self._num_train_steps),\n )))\n\n if self._eval_render_kwargs and hasattr(\n evaluation_environment, 'render_rollouts'):\n # TODO(hartikainen): Make this consistent such that there's no\n # need for the hasattr check.\n training_environment.render_rollouts(evaluation_paths)\n\n yield diagnostics\n\n self.sampler.terminate()\n\n self._training_after_hook()\n\n yield {'done': True, **diagnostics}",
"def main():\n args = parameter_parser()\n tab_printer(args)\n trainer = GPNTrainer(args)\n # trainer.fit()\n \"\"\"\n Scoring on the prediction and learning ability.\n \"\"\"\n trainer.score()\n \"\"\"\n Scoring on the subgraph test set.\n \"\"\"\n # trainer.score2()\n \"\"\"\n Scoring on the generalization ability.\n \"\"\"\n # trainer.score3()\n \"\"\"\n Finetuning for downstream tasks.\n \"\"\"\n # model = finetune_GPN(args, trainer.number_of_labels)\n # model.finetune()",
"def main(_):\n hps = LM.get_default_hparams().parse(FLAGS.hpconfig)\n hps._set(\"num_gpus\", FLAGS.num_gpus)\n print ('*****HYPER PARAMETERS*****')\n print (hps)\n print ('**************************')\n\n vocab = Vocabulary.from_file(os.path.join(FLAGS.datadir, \"vocabulary.txt\"))\n\n if FLAGS.mode == \"train\":\n #hps.batch_size = 256\n dataset = Dataset(vocab, os.path.join(FLAGS.datadir, \"train.txt\"))\n run_train(dataset, hps, os.path.join(FLAGS.logdir, \"train\"), ps_device=\"/gpu:0\")\n elif FLAGS.mode.startswith(\"eval\"):\n data_dir = os.path.join(FLAGS.datadir, \"eval.txt\")\n #predict_model = prediction.Model('/dir/ckpt',os.path.join(FLAGS.datadir, \"vocabulary.txt\"), hps)\n\n dataset = Dataset(vocab, data_dir, deterministic=True)\n prefix_words = \"<brk>\".split()\n predict_model = predict.Model(hps, FLAGS.logdir, FLAGS.datadir)\n print ('start input')\n out = predict_model.predictnextkwords(prefix_words, FLAGS.num_sen)\n for row in out:\n print(' '.join(row) + \"\\n\")\n print(\"len_out: \" + str(len(out)))\n #prediction.topkwords(prefix_words, dataset, hps, FLAGS.logdir, FLAGS.mode)\n #sentence_ppl(prefix_words,dataset, hps, FLAGS.logdir, FLAGS.mode)\n #print vocab\n #dataset = Dataset(vocab, os.path.join(FLAGS.datadir, \"eval.txt\"))\n #run_eval(dataset, hps, FLAGS.logdir, FLAGS.mode, FLAGS.eval_steps)",
"def _hg_model_fn(features, labels, mode, params):\n is_training = (mode == tf.estimator.ModeKeys.TRAIN)\n weight_decay = params.weight_decay\n momentum = params.momentum\n decay_factor = params.decay_factor\n decay_step = params.decay_step\n init_learning_rate = params.init_learning_rate\n num_stacks = params.num_stacks\n num_joints = params.num_joints\n\n tower_features = features\n if mode == tf.estimator.ModeKeys.PREDICT:\n if num_gpus < 1:\n tower_labels = [None]\n else:\n tower_labels = [None for i in range(num_gpus)]\n else:\n tower_labels = labels\n\n tower_losses = []\n tower_gradvars = []\n tower_preds = []\n\n # channels first (NCHW) is normally optimal on GPU and channels last (NHWC)\n # on CPU. The exception is Intel MKL on CPU which is optimal with\n # channels_last.\n data_format = params.data_format\n if not data_format:\n if num_gpus == 0:\n data_format = 'channels_last'\n else:\n data_format = 'channels_first'\n\n if num_gpus == 0:\n num_devices = 1\n device_type = 'cpu'\n else:\n num_devices = num_gpus\n device_type = 'gpu'\n\n for i in range(num_devices):\n worker_device = '/{}:{}'.format(device_type, i)\n if variable_strategy == 'CPU':\n device_setter = utils.local_device_setter(\n worker_device=worker_device)\n elif variable_strategy == 'GPU':\n device_setter = utils.local_device_setter(\n ps_device_type='gpu',\n worker_device=worker_device,\n ps_strategy=tf.contrib.training.GreedyLoadBalancingStrategy(\n num_gpus, tf.contrib.training.byte_size_load_fn))\n if mode == tf.estimator.ModeKeys.TRAIN:\n batch_size = params.train_batch_size / num_devices\n else:\n batch_size = params.eval_batch_size / num_devices\n\n with tf.variable_scope('hg', reuse=bool(i != 0)):\n with tf.name_scope('tower_%d' % i) as name_scope:\n with tf.device(device_setter):\n loss, gradvars, preds = _tower_fn(\n mode, weight_decay, tower_features[i][0], tower_labels[i],\n data_format, params.batch_norm_decay,\n params.batch_norm_epsilon, params.num_stacks, params.num_out, params.n_low, params.num_joints, batch_size,params.seq_length)\n tower_losses.append(loss)\n tower_gradvars.append(gradvars)\n tower_preds.append(preds)\n if i == 0:\n # Only trigger batch_norm moving mean and variance update from\n # the 1st tower. Ideally, we should grab the updates from all\n # towers but these stats accumulate extremely fast so we can\n # ignore the other stats from the other towers without\n # significant detriment.\n update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS,\n name_scope)\n\n if mode == tf.estimator.ModeKeys.TRAIN or mode == tf.estimator.ModeKeys.EVAL:\n\n # Now compute global loss and gradients.\n gradvars = []\n with tf.name_scope('gradient_averaging'):\n all_grads = {}\n for grad, var in itertools.chain(*tower_gradvars):\n if grad is not None:\n all_grads.setdefault(var, []).append(grad)\n for var, grads in six.iteritems(all_grads):\n # Average gradients on the same device as the variables\n # to which they apply.\n with tf.device(var.device):\n if len(grads) == 1:\n avg_grad = grads[0]\n else:\n avg_grad = tf.multiply(tf.add_n(grads), 1. 
/ len(grads))\n gradvars.append((avg_grad, var))\n\n # Device that runs the ops to apply global gradient updates.\n consolidation_device = '/gpu:0' if variable_strategy == 'GPU' else '/cpu:0'\n with tf.device(consolidation_device):\n\n learning_rate = tf.train.exponential_decay(init_learning_rate, tf.train.get_global_step(), decay_step, decay_factor, staircase=True, name= 'learning_rate')\n\n loss = tf.reduce_mean(tower_losses, name='loss')\n\n examples_sec_hook = utils.ExamplesPerSecondHook(\n params.train_batch_size, every_n_steps=10)\n\n tensors_to_log = {'learning_rate': learning_rate, 'loss': loss}\n\n logging_hook = tf.train.LoggingTensorHook(\n tensors=tensors_to_log, every_n_iter=100)\n\n train_hooks = [logging_hook, examples_sec_hook]\n\n optimizer = tf.train.RMSPropOptimizer(learning_rate=learning_rate)\n\n if params.sync:\n optimizer = tf.train.SyncReplicasOptimizer(\n optimizer, replicas_to_aggregate=num_workers)\n sync_replicas_hook = optimizer.make_session_run_hook(params.is_chief)\n train_hooks.append(sync_replicas_hook)\n\n # Create single grouped train op\n train_op = [\n optimizer.apply_gradients(\n gradvars, global_step=tf.train.get_global_step())\n ]\n \n train_op.extend(update_ops)\n train_op = tf.group(*train_op)\n\n predictions = {\n 'heatmaps':\n tf.concat([p['heatmaps'] for p in tower_preds], axis=0),\n 'images':\n tf.concat([i for i in tower_features], axis=0)\n }\n if mode==tf.estimator.ModeKeys.EVAL:\n hm = predictions['heatmaps']\n stacked_labels = tf.concat(labels[0][0][0], axis=0)\n \n gt_labels = tf.transpose(stacked_labels,[1,0,3,4,2])\n\n joint_accur = []\n for j in range(params.seq_length):\n for i in range(params.num_joints):\n joint_accur.append(_pck_hm(hm[j,:,-1, :, :,i], gt_labels[j,:, :, :, i], params.eval_batch_size/num_devices))\n accuracy = tf.stack(joint_accur)\n metrics = {'Mean Pixel Error': tf.metrics.mean(accuracy)}\n tf.logging.info('Accuracy op computed')\n else:\n metrics = None\n \n else:\n train_op = None\n loss = None\n train_hooks = None\n metrics = None\n predictions = {\n 'heatmaps':\n tf.concat([p['heatmaps'] for p in tower_preds], axis=0),\n 'images':\n tf.concat([i for i in tower_features], axis=0)\n }\n \n return tf.estimator.EstimatorSpec(\n mode=mode,\n predictions=predictions,\n loss=loss,\n train_op=train_op,\n training_hooks=train_hooks,\n eval_metric_ops=metrics)",
"def train_and_eval(config, babas_data):\n\n if config.resume_from_checkpoint is not None:\n try:\n if config.augment_background == 'background':\n bg = config.augment_background\n else:\n bg = None\n rfc = config.resume_from_checkpoint\n ic = config.include_validation\n print 'Loading saved config: %s' % config.saved_config\n config = np.load(config.saved_config).item()\n config.resume_from_checkpoint = rfc\n config.include_validation = ic\n if not hasattr(config, 'augment_background'):\n config.augment_background = 'constant'\n if not hasattr(config, 'background_folder'):\n config.background_folder = 'backgrounds'\n if bg is not None:\n print 'Overriding saved config to add kinect backgrounds to training.'\n config.augment_background = bg\n results_dir = rfc\n except:\n print 'Relying on default config file.'\n\n if babas_data: # Shitty naive training method\n config.tfrecord_dir = '/media/data_cifs/monkey_tracking/data_for_babas/tfrecords_from_babas'\n config.babas_tfrecord_dir = config.tfrecord_dir\n config.steps_before_validation = 20\n config.epochs = 2000\n config.convert_labels_to_pixel_space = False\n config.augment_background = 'constant'\n\n # Import your model\n print 'Model directory: %s' % config.model_output\n print 'Running model: %s' % config.model_type\n model_file = import_cnn(config.model_type)\n\n # Prepare model training\n dt_stamp = re.split(\n '\\.', str(datetime.now()))[0].\\\n replace(' ', '_').replace(':', '_').replace('-', '_')\n dt_dataset = '%s_%s' % (config.model_type, dt_stamp)\n if config.selected_joints is not None:\n dt_dataset = '_%s' % (config.selected_joints) + dt_dataset\n config.train_checkpoint = os.path.join(\n config.model_output, dt_dataset) # timestamp this run\n config.summary_dir = os.path.join(\n config.train_summaries, dt_dataset)\n results_dir = os.path.join(config.npy_dir, dt_dataset)\n print 'Saving Dmurphy\\'s online updates to: %s' % results_dir\n dir_list = [config.train_checkpoint, config.summary_dir, results_dir]\n [tf_fun.make_dir(d) for d in dir_list]\n\n # Prepare model inputs\n train_data = os.path.join(config.tfrecord_dir, config.train_tfrecords)\n if config.babas_tfrecord_dir is not None:\n train_babas_tfrecord_dir = os.path.join(\n config.babas_tfrecord_dir,\n config.train_tfrecords)\n if config.include_validation or config.include_validation is None:\n val_babas_tfrecord_dir = os.path.join(\n config.babas_tfrecord_dir,\n config.val_tfrecords)\n else:\n train_babas_tfrecord_dir = None\n val_babas_tfrecord_dir = None\n\n if isinstance(config.include_validation, basestring):\n validation_data = config.include_validation\n elif config.include_validation == True:\n validation_data = os.path.join(\n config.tfrecord_dir,\n config.val_tfrecords)\n else:\n validation_data = None\n\n print 'Using training set: %s' % train_data\n print 'Using validation set: %s' % validation_data\n\n # Prepare data on CPU\n with tf.device('/cpu:0'):\n train_data_dict = inputs(\n tfrecord_file=train_data,\n batch_size=config.train_batch,\n im_size=config.resize,\n target_size=config.image_target_size,\n model_input_shape=config.resize,\n train=config.data_augmentations,\n label_shape=config.num_classes,\n num_epochs=config.epochs,\n image_target_size=config.image_target_size,\n image_input_size=config.image_input_size,\n maya_conversion=config.maya_conversion,\n max_value=config.max_depth,\n normalize_labels=config.normalize_labels,\n aux_losses=config.aux_losses,\n selected_joints=config.selected_joints,\n joint_names=config.joint_order,\n 
num_dims=config.num_dims,\n keep_dims=config.keep_dims,\n mask_occluded_joints=config.mask_occluded_joints,\n background_multiplier=config.background_multiplier,\n augment_background=config.augment_background,\n background_folder=config.background_folder,\n randomize_background=config.randomize_background,\n maya_joint_labels=config.labels,\n babas_tfrecord_dir=train_babas_tfrecord_dir,\n convert_labels_to_pixel_space=config.convert_labels_to_pixel_space,\n image_target_size_is_flipped=config.image_target_size_is_flipped)\n train_data_dict['deconv_label_size'] = len(config.labels)\n\n val_data_dict = inputs(\n tfrecord_file=validation_data,\n batch_size=config.validation_batch,\n im_size=config.resize,\n target_size=config.image_target_size,\n model_input_shape=config.resize,\n train=config.data_augmentations,\n label_shape=config.num_classes,\n num_epochs=config.epochs,\n image_target_size=config.image_target_size,\n image_input_size=config.image_input_size,\n maya_conversion=config.maya_conversion,\n max_value=config.max_depth,\n normalize_labels=config.normalize_labels,\n aux_losses=config.aux_losses,\n selected_joints=config.selected_joints,\n joint_names=config.joint_order,\n num_dims=config.num_dims,\n keep_dims=config.keep_dims,\n mask_occluded_joints=config.mask_occluded_joints,\n background_multiplier=config.background_multiplier,\n augment_background='none',\n background_folder=config.background_folder,\n randomize_background=None,\n maya_joint_labels=config.labels,\n babas_tfrecord_dir=val_babas_tfrecord_dir,\n convert_labels_to_pixel_space=config.convert_labels_to_pixel_space,\n image_target_size_is_flipped=config.image_target_size_is_flipped)\n val_data_dict['deconv_label_size'] = len(config.labels)\n\n # Check output_shape\n if config.selected_joints is not None:\n print 'Targeting joint: %s' % config.selected_joints\n joint_shape = len(config.selected_joints) * config.keep_dims\n if (config.num_classes // config.keep_dims) > (joint_shape):\n print 'New target size: %s' % joint_shape\n config.num_classes = joint_shape\n\n with tf.device('/gpu:0'):\n with tf.variable_scope('cnn') as scope:\n print 'Creating training graph:'\n model = model_file.model_struct(\n weight_npy_path=config.weight_npy_path)\n train_mode = tf.get_variable(name='training', initializer=True)\n model.build(\n rgb=train_data_dict['image'],\n target_variables=train_data_dict,\n train_mode=train_mode,\n batchnorm=config.batch_norm)\n train_mu, train_var = tf.nn.moments(train_data_dict['image'], axes=[1, 2, 3])\n tf.summary.histogram(\"train image mean\", train_mu)\n tf.summary.histogram(\"train image std\", tf.sqrt(train_var))\n if 'deconv_image' in config.aux_losses:\n tf.summary.image('Deconv train', model.deconv)\n if 'deconv_label' in config.aux_losses:\n tf.summary.image(\n 'Deconv label train',\n tf.expand_dims(\n tf.cast(\n tf.argmax(model.deconv, axis=3), tf.float32), 3))\n\n # Setup validation op\n if validation_data is not False:\n scope.reuse_variables()\n print 'Creating validation graph:'\n val_model = model_file.model_struct()\n val_model.build(\n rgb=val_data_dict['image'],\n target_variables=val_data_dict)\n\n # Calculate validation accuracy\n val_mu, val_var = tf.nn.moments(val_data_dict['image'], axes=[1, 2, 3])\n tf.summary.histogram(\"validation image mean\", val_mu)\n tf.summary.histogram(\"validation image std\", tf.sqrt(val_var))\n if 'label' in val_data_dict.keys():\n # val_score = tf.reduce_mean(\n # tf_fun.l2_loss(\n # val_model.output, val_data_dict['label']))\n if config.keep_dims 
== 3:\n z_mask = tf.expand_dims(tf.tile([1, 1, 0], [int(val_data_dict['label'].get_shape()[-1]) // 3]), axis=0)\n z_mask = tf.cast(z_mask, tf.float32)\n val_model.output = val_model.output * z_mask\n val_data_dict['label'] = val_data_dict['label'] * z_mask \n val_score = tf.reduce_mean(tf.nn.l2_loss(val_model.output - val_data_dict['label']))\n tf.summary.scalar(\"validation mse\", val_score)\n if 'fc' in config.aux_losses:\n tf.summary.image('FC val activations', val_model.final_fc)\n if 'deconv_image' in config.aux_losses:\n tf.summary.image('Deconv val', val_model.deconv)\n if 'deconv_label' in config.aux_losses:\n tf.summary.image(\n 'Deconv label train',\n tf.expand_dims(\n tf.cast(\n tf.argmax(val_model.deconv, axis=3),\n tf.float32), 3))\n tf.summary.image(\n 'validation images',\n tf.cast(val_data_dict['image'], tf.float32))\n\n # Prepare the loss functions:::\n loss_list, loss_label = [], []\n if 'label' in train_data_dict.keys():\n # 1. Joint localization loss\n if config.calculate_per_joint_loss == 'thomas':\n label_loss, use_joints, joint_variance = tf_fun.thomas_l1_loss(\n model=model,\n train_data_dict=train_data_dict,\n config=config,\n y_key='label',\n yhat_key='output')\n loss_list += [label_loss]\n elif config.calculate_per_joint_loss == 'skeleton':\n label_loss = tf_fun.skeleton_loss(\n model=model,\n train_data_dict=train_data_dict,\n config=config,\n y_key='label',\n yhat_key='output')\n loss_list += [label_loss]\n elif config.calculate_per_joint_loss == 'skeleton and joint':\n label_loss = tf_fun.skeleton_loss(\n model=model,\n train_data_dict=train_data_dict,\n config=config,\n y_key='label',\n yhat_key='output')\n loss_list += [label_loss]\n loss_label += ['skeleton loss']\n delta = model['output'] - train_data_dict['label']\n proc_weights = np.asarray(\n config.dim_weight)[None,:].repeat(\n len(config.joint_names), axis=0).reshape(1, -1)\n delta *= proc_weights\n # label_loss, use_joints, joint_variance = tf_fun.thomas_l1_loss(\n # model=model,\n # train_data_dict=train_data_dict,\n # config=config,\n # y_key='label',\n # yhat_key='output')\n # loss_list += [label_loss]\n loss_list += [tf.nn.l2_loss(\n model['output'] - train_data_dict['label'])]\n else:\n loss_list += [tf.nn.l2_loss(\n model['output'] - train_data_dict['label'])]\n loss_label += ['combined head']\n for al in loss_helper.potential_aux_losses():\n loss_list, loss_label = loss_helper.get_aux_losses(\n loss_list=loss_list,\n loss_label=loss_label,\n train_data_dict=train_data_dict,\n model=model,\n aux_loss_dict=al,\n domain_adaptation=train_babas_tfrecord_dir)\n loss = tf.add_n(loss_list)\n\n # Add wd if necessary\n if config.wd_penalty is not None:\n _, l2_wd_layers = tf_fun.fine_tune_prepare_layers(\n tf.trainable_variables(), config.wd_layers)\n l2_wd_layers = [\n x for x in l2_wd_layers if 'biases' not in x.name]\n if config.wd_type == 'l1':\n loss += (config.wd_penalty * tf.add_n(\n [tf.reduce_sum(tf.abs(x)) for x in l2_wd_layers]))\n elif config.wd_type == 'l2':\n loss += (config.wd_penalty * tf.add_n(\n [tf.nn.l2_loss(x) for x in l2_wd_layers]))\n\n optimizer = loss_helper.return_optimizer(config.optimizer)\n optimizer = optimizer(config.lr)\n\n if hasattr(config, 'fine_tune_layers') and config.fine_tune_layers is not None:\n print 'Finetuning learning for: %s' % config.fine_tune_layers\n train_op, grads = tf_fun.finetune_learning(\n loss,\n trainables=tf.trainable_variables(),\n fine_tune_layers=config.fine_tune_layers,\n config=config\n )\n else:\n # Op to calculate every variable gradient\n 
grads = optimizer.compute_gradients(\n loss, tf.trainable_variables())\n # Op to update all variables according to their gradient\n train_op = optimizer.apply_gradients(\n grads_and_vars=grads)\n\n # Summarize all gradients and weights\n [tf.summary.histogram(\n var.name + '/gradient', grad)\n for grad, var in grads if grad is not None]\n # train_op = optimizer.minimize(loss)\n\n # Summarize losses\n [tf.summary.scalar(lab, il) for lab, il in zip(\n loss_label, loss_list)]\n\n # Summarize images and l1 weights\n tf.summary.image(\n 'train images',\n tf.cast(train_data_dict['image'], tf.float32))\n tf_fun.add_filter_summary(\n trainables=tf.trainable_variables(),\n target_layer='conv1_1_filters')\n\n # Set up summaries and saver\n saver = tf.train.Saver(\n tf.global_variables(), max_to_keep=config.keep_checkpoints)\n summary_op = tf.summary.merge_all()\n tf.add_to_collection('output', model.output)\n\n # Initialize the graph\n sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))\n\n # Need to initialize both of these if supplying num_epochs to inputs\n sess.run(tf.group(tf.global_variables_initializer(),\n tf.local_variables_initializer()))\n summary_writer = tf.summary.FileWriter(config.summary_dir, sess.graph)\n\n # Set up exemplar threading\n coord = tf.train.Coordinator()\n threads = tf.train.start_queue_runners(sess=sess, coord=coord)\n\n # Create list of variables to run through training model\n train_session_vars = {\n 'train_op': train_op,\n 'loss_value': loss,\n 'im': train_data_dict['image'],\n 'yhat': model.output,\n 'ytrue': train_data_dict['label']\n }\n if hasattr(model, 'deconv'):\n train_session_vars['deconv'] = model.deconv\n if hasattr(model, 'final_fc'):\n train_session_vars['fc'] = model.final_fc\n\n # Create list of variables to run through validation model\n val_session_vars = {\n 'val_acc': val_score,\n 'val_pred': val_model.output,\n 'val_ims': val_data_dict['image'],\n 'val_true': val_data_dict['label'],\n }\n\n # Create list of variables to save to numpys\n save_training_vars = [\n 'im',\n 'yhat',\n 'ytrue',\n 'yhat'\n ]\n\n for al in loss_helper.potential_aux_losses():\n if al.keys()[0] in train_data_dict.keys():\n y_key = '%s' % al.keys()[0]\n train_session_vars[y_key] = train_data_dict[al.values()[0]['y_name']]\n save_training_vars += [y_key]\n\n yhat_key = '%s_hat' % al.keys()[0]\n train_session_vars[yhat_key] = model[al.values()[0]['model_name']]\n save_training_vars += [yhat_key]\n\n # Start training loop\n np.save(config.train_checkpoint, config)\n step, losses = 0, []\n num_joints = int(\n train_data_dict['label'].get_shape()[-1]) // config.keep_dims\n normalize_vec = tf_fun.get_normalization_vec(config, num_joints)\n if config.resume_from_checkpoint is not None:\n if '.ckpt' in config.resume_from_checkpoint:\n ckpt = config.resume_from_checkpoint\n 'Restoring specified checkpoint: %s' % config.resume_from_checkpoint\n else:\n ckpt = tf.train.latest_checkpoint(config.resume_from_checkpoint)\n print 'Evaluating checkpoint: %s' % ckpt\n saver.restore(sess, ckpt)\n try:\n while not coord.should_stop():\n start_time = time.time()\n train_out_dict = sess.run(train_session_vars.values())\n train_out_dict = {k: v for k, v in zip(\n train_session_vars.keys(), train_out_dict)}\n losses.append(train_out_dict['loss_value'])\n duration = time.time() - start_time\n assert not np.isnan(\n train_out_dict['loss_value']), 'Model diverged with loss = NaN'\n if step % config.steps_before_validation == 0:\n if validation_data is not False:\n val_out_dict = 
sess.run(\n val_session_vars.values())\n val_out_dict = {k: v for k, v in zip(\n val_session_vars.keys(), val_out_dict)}\n # if config.normalize_labels:\n # val_out_dict['val_pred'] *= normalize_vec\n # val_out_dict['val_true'] *= normalize_vec\n np.savez(\n os.path.join(\n results_dir, '%s_val_coors' % step),\n val_pred=val_out_dict['val_pred'],\n val_ims=val_out_dict['val_ims'],\n val_true=val_out_dict['val_true'],\n normalize_vec=normalize_vec)\n with open(\n os.path.join(\n results_dir, '%s_config.p' % step), 'wb') as fp:\n pickle.dump(config, fp)\n\n # Summaries\n summary_str = sess.run(summary_op)\n summary_writer.add_summary(summary_str, step)\n\n # Training status and validation accuracy attach 9177\n format_str = (\n '%s: step %d, loss = %.8f (%.1f examples/sec; '\n '%.3f sec/batch) | '\n 'Validation l2 loss = %s | logdir = %s')\n print (format_str % (\n datetime.now(), step, train_out_dict['loss_value'],\n config.train_batch / duration, float(duration),\n val_out_dict['val_acc'],\n config.summary_dir))\n\n # Save the model checkpoint if it's the best yet\n if config.normalize_labels:\n train_out_dict['yhat'] *= normalize_vec\n train_out_dict['ytrue'] *= normalize_vec\n [save_training_data(\n output_dir=results_dir,\n data=train_out_dict[k],\n name='%s_%s' % (k, step)) for k in save_training_vars]\n saver.save(\n sess, os.path.join(\n config.train_checkpoint,\n 'model_' + str(step) + '.ckpt'), global_step=step)\n\n else:\n # Training status\n format_str = ('%s: step %d, loss = %.8f (%.1f examples/sec; '\n '%.3f sec/batch)')\n print (format_str % (\n datetime.now(),\n step,\n train_out_dict['loss_value'],\n config.train_batch / duration,\n float(duration)))\n # End iteration\n step += 1\n\n except tf.errors.OutOfRangeError:\n print('Done training for %d epochs, %d steps.' % (config.epochs, step))\n finally:\n coord.request_stop()\n dt_stamp = get_dt() # date-time stamp\n np.save(\n os.path.join(\n config.tfrecord_dir, '%s_training_loss' % dt_stamp), losses)\n coord.join(threads)\n sess.close()",
"def train_step(self, batch):\n user, pos, neg = batch\n with tf.GradientTape() as t:\n\n # Clean Inference\n xu_pos, gamma_u, gamma_pos, emb_pos_feature, theta_u, beta_pos = \\\n self(inputs=(user, pos), training=True)\n xu_neg, _, gamma_neg, _, _, beta_neg = self(inputs=(user, neg), training=True)\n\n result = tf.clip_by_value(xu_pos - xu_neg, -80.0, 1e8)\n loss = tf.reduce_sum(tf.nn.softplus(-result))\n\n # Regularization Component\n reg_loss = self.reg * tf.reduce_sum([tf.nn.l2_loss(gamma_u),\n tf.nn.l2_loss(gamma_pos),\n tf.nn.l2_loss(gamma_neg),\n tf.nn.l2_loss(theta_u)]) * 2 \\\n + self.reg * tf.nn.l2_loss(beta_pos) * 2 \\\n + self.reg * tf.nn.l2_loss(beta_neg) * 2 / 10 \\\n + self.reg * tf.reduce_sum([tf.nn.l2_loss(self.E), tf.nn.l2_loss(self.Bp)]) * 2\n\n # Loss to be optimized\n loss += reg_loss\n\n params = [\n self.Bi,\n self.Gu,\n self.Gi,\n self.Tu,\n self.E,\n self.Bp\n ]\n\n grads = t.gradient(loss, params)\n self.optimizer.apply_gradients(zip(grads, params))\n\n return loss.numpy()",
"def train(network, online_params, target_params, optimizer, optimizer_state,\n states, actions, next_states, rewards, terminals, num_tau_samples,\n num_tau_prime_samples, num_quantile_samples, cumulative_gamma,\n double_dqn, kappa, rng, coherence_weight, option, use_ortho_loss,\n use_cohe_loss, tau, alpha, clip_value_min):\n def loss_fn(params, rng_input, target_quantile_vals):\n def online(state):\n return network.apply(params, state, num_quantiles=num_tau_samples,\n rng=rng_input)\n\n model_output = jax.vmap(online)(states)\n quantile_values = model_output.quantile_values\n quantiles = model_output.quantiles\n representations = model_output.representation\n representations = jnp.squeeze(representations)\n chosen_action_quantile_values = jax.vmap(lambda x, y: x[:, y][:, None])(\n quantile_values, actions)\n # Shape of bellman_erors and huber_loss:\n # batch_size x num_tau_prime_samples x num_tau_samples x 1.\n bellman_errors = (target_quantile_vals[:, :, None, :] -\n chosen_action_quantile_values[:, None, :, :])\n # The huber loss (see Section 2.3 of the paper) is defined via two cases:\n # case_one: |bellman_errors| <= kappa\n # case_two: |bellman_errors| > kappa\n huber_loss_case_one = (\n (jnp.abs(bellman_errors) <= kappa).astype(jnp.float32) *\n 0.5 * bellman_errors ** 2)\n huber_loss_case_two = (\n (jnp.abs(bellman_errors) > kappa).astype(jnp.float32) *\n kappa * (jnp.abs(bellman_errors) - 0.5 * kappa))\n huber_loss = huber_loss_case_one + huber_loss_case_two\n # Tile by num_tau_prime_samples along a new dimension. Shape is now\n # batch_size x num_tau_prime_samples x num_tau_samples x 1.\n # These quantiles will be used for computation of the quantile huber loss\n # below (see section 2.3 of the paper).\n quantiles = jnp.tile(quantiles[:, None, :, :],\n [1, num_tau_prime_samples, 1, 1]).astype(jnp.float32)\n # Shape: batch_size x num_tau_prime_samples x num_tau_samples x 1.\n quantile_huber_loss = (jnp.abs(quantiles - jax.lax.stop_gradient(\n (bellman_errors < 0).astype(jnp.float32))) * huber_loss) / kappa\n # Sum over current quantile value (num_tau_samples) dimension,\n # average over target quantile value (num_tau_prime_samples) dimension.\n # Shape: batch_size x num_tau_prime_samples x 1.\n quantile_huber_loss = jnp.sum(quantile_huber_loss, axis=2)\n quantile_huber_loss = jnp.mean(quantile_huber_loss, axis=1)\n if use_ortho_loss and use_cohe_loss:\n coherence_loss = coherence_utils.orthogonal_features_coherence(\n representations, option)\n cosine_similarity = coherence_utils.orthogonality(representations)\n orthogonality_loss = jnp.mean(\n jnp.abs(cosine_similarity - jnp.eye(representations.shape[0])))\n if use_ortho_loss and not use_cohe_loss:\n coherence_loss = 0.\n cosine_similarity = coherence_utils.orthogonality(representations)\n orthogonality_loss = jnp.mean(\n jnp.abs(cosine_similarity - jnp.eye(representations.shape[0])))\n if use_cohe_loss and not use_ortho_loss:\n coherence_loss = coherence_utils.orthogonal_features_coherence(\n representations, option)\n cosine_similarity = coherence_utils.orthogonality(representations)\n orthogonality_loss = 0.\n loss = ((1. 
- coherence_weight) * quantile_huber_loss + coherence_weight *\n (coherence_loss + orthogonality_loss))\n return jnp.mean(loss), (jnp.mean(quantile_huber_loss), coherence_loss,\n orthogonality_loss)\n\n if tau is None:\n rng, target_quantile_vals, _, _ = target_quantile_values(\n network,\n online_params,\n target_params,\n states,\n next_states,\n rewards,\n terminals,\n num_tau_prime_samples,\n num_quantile_samples,\n cumulative_gamma,\n double_dqn,\n rng)\n else:\n rng, target_quantile_vals, _, _ = (\n munchausen_target_quantile_values(\n network,\n target_params,\n states,\n actions,\n next_states,\n rewards,\n terminals,\n num_tau_prime_samples,\n num_quantile_samples,\n cumulative_gamma,\n rng,\n tau,\n alpha,\n clip_value_min))\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n rng, rng_input = jax.random.split(rng)\n all_losses, grad = grad_fn(online_params, rng_input, target_quantile_vals)\n loss, component_losses = all_losses\n quantile_loss, coherence_loss, orthogonality_loss = component_losses\n updates, optimizer_state = optimizer.update(grad, optimizer_state)\n online_params = optax.apply_updates(online_params, updates)\n return (rng, optimizer_state, online_params, loss, quantile_loss,\n coherence_loss, orthogonality_loss)",
"def run_model(LinearSVM_params, category):\n \n # Unload common parameters\n config = LinearSVM_params['configs'][category] if LinearSVM_params['configs'] else None\n verbose = LinearSVM_params['verbose']\n warning = LinearSVM_params['warning']\n stats_path = LinearSVM_params['stats_path']\n\n model_name = LinearSVM_params['model_name']\n print(f'Running model {model_name}')\n\n # Unload the training data specific parameters\n train_size = LinearSVM_params['train_size']\n active_learning_iter = LinearSVM_params['active_learning_iter']\n cross_validation = LinearSVM_params['cross_validate']\n full = LinearSVM_params['full_dataset']\n active_learning = LinearSVM_params['active_learning']\n w_hx = LinearSVM_params['with_historical_data']\n w_k = LinearSVM_params['with_k']\n\n # Specify the desired operation\n fine_tuning = LinearSVM_params['fine_tuning']\n save_model = LinearSVM_params['save_model']\n to_params = True\n\n if fine_tuning:\n class_weights = [{0: i, 1: 1.0-i} for i in np.linspace(.1, .9, num=9)]\n class_weights.append('balanced')\n class_weights.append(None)\n\n ft_params = {\n # 'penalty': ['l1', 'l2'],\n 'penalty': ['l1'],\n # 'loss': ['hinge', 'squared_hinge'],\n 'loss': ['squared_hinge'],\n 'dual': [False],\n # 'C': [.001, .01, .1, 1, 10],\n 'C': [i for i in np.linspace(0.001, 0.01, num=10)],\n # 'tol': [.0001, .001, .01, .1, 1],\n 'tol': [i for i in np.linspace(0.01, 0.1, num=10)],\n 'fit_intercept': [True],\n 'class_weight': class_weights,\n }\n\n _ = grid_search(\n ActiveLinearSVM,\n ft_params,\n train_size,\n active_learning_iter,\n active_learning=active_learning,\n w_hx=w_hx,\n w_k=w_k,\n info=True\n )\n else:\n # Load the desired sized dataset under desired option\n amine_list, x_t, y_t, x_v, y_v, all_data, all_labels = process_dataset(\n train_size=train_size,\n active_learning_iter=active_learning_iter,\n verbose=verbose,\n cross_validation=cross_validation,\n full=full,\n active_learning=active_learning,\n w_hx=w_hx,\n w_k=w_k\n )\n\n # print(amine_list)\n for amine in amine_list:\n if cross_validation:\n # print(\"Training and cross validation on {} amine.\".format(amine))\n\n # Create the LinearSVM model instance for the specific amine\n ALSVM = ActiveLinearSVM(\n amine=amine,\n config=config,\n verbose=verbose,\n stats_path=stats_path,\n model_name=model_name)\n\n # Load the training and validation set into the model\n ALSVM.load_dataset(x_t[amine], y_t[amine], x_v[amine], y_v[amine], all_data[amine], all_labels[amine])\n\n # Train the data on the training set\n ALSVM.train(warning=warning)\n\n # Conduct active learning with all the observations available in the pool\n if active_learning:\n ALSVM.active_learning(num_iter=active_learning_iter, warning=warning, to_params=to_params)\n else:\n ALSVM.store_metrics_to_params()\n\n # Save the model for future reproducibility\n if save_model:\n ALSVM.save_model(model_name)\n\n # TODO: testing part not implemented: might need to change the logic loading things in",
"def train(self,\n step_size=0.01,\n max_steps=50001,\n b_=0,\n verbose=True,\n optimizer=None,\n log_training = False,\n batch_size = 0):\n\n\n self.model_loaded = True\n if self.graph is None:\n self.graph = tf.Graph()\n build_graph = True\n else:\n build_graph = False\n\n with self.graph.as_default():\n\n if self.sess == None:\n sess = tf.Session()\n self.sess = sess\n else:\n sess = self.sess\n\n for n in self.nodes:\n n.set_prefactors()\n\n # Build all the required tensors\n logits_list = self.build_network()\n cost_list = self.get_cost(logits_list)\n train_feed_dict, valid_feed_dict = self.get_feed('train')\n\n\n\n cost = 0\n for c in cost_list:\n cost += c\n\n # Create regularization parameters for every distinct namescope\n b = {}\n if build_graph:\n for ns in self.distinct_namescopes():\n b[ns] = tf.placeholder(tf.float32, name = '{}/b'.format(ns))\n else:\n for ns in self.distinct_namescopes():\n b[ns] = self.graph.get_tensor_by_name('{}/b:0'.format(ns))\n\n # L2-loss\n loss = 0\n with tf.variable_scope(\"\", reuse = True):\n for n in self.nodes:\n if not isinstance(n, Subnetnode): continue\n\n for l, layer in enumerate(n.layers):\n name = n.name\n loss += tf.nn.l2_loss(tf.get_variable(\"{}/W{}\".format(name, l+1))) * \\\n b[name]/layer\n\n cost += loss\n\n if b_ == 0:\n b_ = [0] * len(self.distinct_namescopes())\n\n for i, ns in enumerate(self.distinct_namescopes()):\n train_feed_dict['{}/b:0'.format(ns)] = b_[i]\n valid_feed_dict['{}/b:0'.format(ns)] = 0\n\n\n if self.optimizer == None:\n if optimizer == None:\n self.optimizer = tf.train.AdamOptimizer(learning_rate = step_size)\n else:\n self.optimizer = optimizer\n\n train_step = self.optimizer.minimize(cost)\n\n # Workaround to load the AdamOptimizer variables\n if not self.checkpoint_path == None:\n saver = tf.train.Saver()\n saver.restore(self.sess,self.checkpoint_path)\n self.checkpoint_path = None\n\n ml.initialize_uninitialized(self.sess)\n\n self.initialized = True\n\n train_writer = tf.summary.FileWriter('./log/',\n self.graph)\n\n old_cost = 1e8\n\n statistics = {}\n statistics['time_trained'] = time()\n statistics['total_cost'] = []\n statistics['loss'] = []\n statistics['partial_cost'] = {}\n for t in self.find_targetnodes():\n statistics['partial_cost'][t.name] = []\n\n for _ in range(0,max_steps):\n\n if batch_size > 0:\n start = 0\n while(start != -1):\n batch_feed_dict, start = ml.get_batch_feed(train_feed_dict,\n start, batch_size)\n sess.run(train_step, feed_dict = batch_feed_dict)\n else:\n sess.run(train_step, feed_dict=train_feed_dict)\n\n # if _%int(max_steps/100) == 0 and adaptive_rate == True:\n # new_cost = sess.run(tf.sqrt(cost),\n # feed_dict=train_feed_dict)\n #\n # if new_cost > old_cost:\n # step_size /= 2\n # print('Step size decreased to {}'.format(step_size))\n # train_step = tf.train.GradientDescentOptimizer(step_size).minimize(cost)\n # old_cost = new_cost\n\n # Log training process\n if _%int(max_steps/100) == 0 and log_training:\n statistics['total_cost'].append(sess.run(tf.sqrt(cost),\n feed_dict=valid_feed_dict))\n statistics['loss'].append(sess.run(loss,\n feed_dict=valid_feed_dict))\n if len(cost_list) > 1:\n for t, c in zip(self.find_targetnodes(), cost_list):\n statistics['partial_cost'][t.name].append(sess.run(tf.sqrt(c),\n feed_dict=valid_feed_dict))\n\n # Print training process\n if _%int(max_steps/10) == 0 and verbose:\n print('Step: ' + str(_))\n print('Training set loss:')\n if len(cost_list) > 1:\n for t, c in zip(self.find_targetnodes(), cost_list):\n print('{}: 
{}'.format(t.name,sess.run(tf.sqrt(c),\n feed_dict=train_feed_dict)))\n print('Total: {}'.format(sess.run(tf.sqrt(cost-loss),\n feed_dict=train_feed_dict)))\n print('Validation set loss:')\n if len(cost_list) > 1:\n for t, c in zip(self.find_targetnodes(), cost_list):\n print('{}: {}'.format(t.name, sess.run(tf.sqrt(c),\n feed_dict=valid_feed_dict)))\n print('Total: {}'.format(sess.run(tf.sqrt(cost),\n feed_dict=valid_feed_dict)))\n print('--------------------')\n print('L2-loss: {}'.format(sess.run(loss,\n feed_dict=train_feed_dict)))\n\n # Final log entry\n\n statistics['total_cost'].append(sess.run(tf.sqrt(cost),\n feed_dict=valid_feed_dict))\n statistics['loss'].append(sess.run(loss,\n feed_dict=valid_feed_dict))\n if len(cost_list) > 1:\n for t, c in zip(self.find_targetnodes(), cost_list):\n statistics['partial_cost'][t.name].append(sess.run(tf.sqrt(c),\n feed_dict=valid_feed_dict))\n statistics['time_trained'] = time() - statistics['time_trained']\n return statistics",
"def train(self): \n self.current_step = 0\n self.log = log_setup(self.args)\n self.current_gamma = self.args.initial_gamma\n with tf.Session(graph = self.computation_graph) as session:\n self.init.run()\n print(\"Model Initialized.\")\n for repetition in range(0, self.args.epochs):\n\n random.shuffle(self.nodes)\n self.optimization_time = 0 \n self.average_loss = 0\n\n epoch_printer(repetition)\n for i in tqdm(range(int(len(self.edges)/self.args.batch_size))):\n self.current_step = self.current_step + 1\n self.current_gamma = gamma_incrementer(self.current_step, self.args.initial_gamma, self.current_gamma, self.true_step_size)\n feed_dict = self.feed_dict_generator(self.edges[i*self.args.batch_size:(i+1)*self.args.batch_size], self.current_step, self.current_gamma)\n start = time.time()\n _, loss = session.run([self.train_op , self.loss], feed_dict=feed_dict)\n end = time.time()\n self.optimization_time = self.optimization_time + (end-start)\n self.average_loss = self.average_loss + loss\n\n print(\"\")\n self.average_loss = self.average_loss/self.vocab_size\n self.final_embeddings = self.factorization_layer.embedding_matrix.eval()\n if \"CODE\" in self.args.model: \n self.c_means = self.cluster_layer.cluster_means.eval()\n self.modularity_score, assignments = neural_modularity_calculator(self.graph, self.final_embeddings, self.c_means)\n else:\n self.modularity_score, assignments = classical_modularity_calculator(self.graph, self.final_embeddings, self.args)\n self.log = log_updater(self.log, repetition, self.average_loss, self.optimization_time, self.modularity_score)\n tab_printer(self.log)\n if \"CODE\" in self.args.model: \n initiate_dump_grafcode(self.log, assignments, self.args, self.final_embeddings, self.c_means)\n else:\n initiate_dump_graf(self.log, assignments, self.args, self.final_embeddings)",
"def train_classifiers(params):\n # Create result dataframe\n out = pd.DataFrame(\n columns=[\"Dataset\", \"Classifier\", \"Accuracy\", \"F1\", \"Precision\", \"Recall\"])\n\n for model_type, all_languages in params.items():\n print(\"Classifier: \", str(model_type))\n\n for language, all_targets in all_languages.items():\n print(language)\n for target, model_params in all_targets.items():\n print(target)\n print(model_params)\n\n datasets = sample_datasets(\n language, target, SAMPLING, TFIDF, model_params['top_k_words'], SUB_SAMPLE_RERUNS)\n\n # Iterate the datasets\n for data_id, dataset in enumerate(datasets):\n dataset_name = dataset[0]\n data = dataset[1]\n y = np.array(dataset[2])\n val_data = dataset[3]\n val_y = np.array(dataset[4])\n\n acc_scores = []\n pre_scores = []\n rec_scores = []\n f1_scores = []\n \n global X_train\n X_train, X_test = data, val_data\n y_train, y_test = y, val_y\n y_pred = None\n\n # Create model instance.\n model = mlp_model(layers=model_params['hidden_layers'], units=model_params['hidden_units'], dropout_rate=model_params['dropout_rate'],\n input_shape=X_train.shape[1:], num_classes=2)\n optimizer = tf.keras.optimizers.Adam(\n lr=model_params['learning_rate'])\n model.compile(optimizer=optimizer,\n loss='binary_crossentropy', metrics=['acc'])\n\n # Stop training is validation loss doesnt decrease for 3 steps\n callbacks = [tf.keras.callbacks.EarlyStopping(\n monitor='val_loss', patience=3)]\n\n # Train and validate model.\n history = model.fit(\n X_train,\n y_train,\n epochs=model_params['epochs'],\n callbacks=callbacks,\n validation_data=(X_test, y_test),\n verbose=0,\n batch_size=512)\n\n acc_scores.append(\n history.history['val_acc'][-1])\n y_pred = [round(a[0])\n for a in model.predict(X_test)]\n\n # Compute the results\n prfs = precision_recall_fscore_support(\n y_test, y_pred, warn_for=[])\n\n pre_scores.append(prfs[0].mean())\n rec_scores.append(prfs[1].mean())\n f1_scores.append(prfs[2].mean())\n\n # Append average scores\n clf_acc = np.array(acc_scores).mean()\n clf_pre = np.array(pre_scores).mean()\n clf_rec = np.array(rec_scores).mean()\n clf_f1 = np.array(f1_scores).mean()\n\n out = out.append(pd.DataFrame(\n [[dataset_name, model_type, clf_acc, clf_f1, clf_pre, clf_rec]], columns=out.columns), ignore_index=True)\n\n return out",
"def fit(self,\n X_train,\n y_train, \n X_test, \n y_test,\n max_evals,\n **kwargs,\n ):\n \n self.max_evals = max_evals\n \n for key in self.models_dict.keys():\n \n path_model_dir = self.path_model_dirs[key]\n \n if self.verbose >=1: \n print('\\n----',key,'----')\n print('path_model_dir:',path_model_dir)\n \n model_dict = self.models_dict[key]\n model_type = str(type(model_dict['model']))\n \n if 'sklearn' in model_type or 'xgboost' in model_type:\n path_file = _os.path.join(path_model_dir,'model_dict.dill')\n elif 'Net' in key:\n path_file = _os.path.join(path_model_dir,'best_model.h5')\n \n if self.retrain or _os.path.isfile(path_file)==False:\n model_dict = self._single_model_BayesianSearchCV(key, \n model_dict, \n X_train, y_train, \n X_test, y_test,\n path_model_dir,\n **kwargs)\n self.models_dict[key] = model_dict\n \n\n else: #reload previously trained model\n if 'sklearn' in str(type(self.models_dict[key]['model'])):\n self.models_dict[key] = self.load('model_dict', 'dill', path_model_dir)\n elif 'Net' in key:\n #check kwargs for epochs\n epochs = 100\n for item in self.kwargs.items():\n if 'epochs' in item[0]: epochs = item[1]\n self.models_dict[key]['best_model'] = _NeuralNet.utils.load_model(\n _os.path.join(path_model_dir,'best_model.h5'))\n self.models_dict[key]['best_params'] = self.load('best_params', 'dill', path_model_dir)\n \n if 'Net' in key:\n y_pred = self.models_dict[key]['best_model'].predict(_np.array(X_test))\n else:\n y_pred = self.models_dict[key]['best_model'].predict(X_test)\n \n\n if 'Net' not in key:\n self.models_dict[key]['best_pred_score'] = self.models_dict[key]['best_model'].score(X_test, y_test)\n y_pred_proba = self.models_dict[key]['best_model'].predict_proba(X_test)[:,1]\n else:\n \n if 'crossentropy' in self.models_dict[key]['best_model'].loss:\n y_pred_proba = y_pred\n y_pred = (y_pred < 0.5).astype(int)\n \n self.models_dict[key]['best_pred_score'] = self.models_dict[key]['best_model'].evaluate(_np.array(X_test), \n _np.array(y_test),\n verbose =0)\n \n if self.verbose >=1:\n try:\n print('\\tbest_cv_score:',self.models_dict[key]['best_cv_score'])\n except Exception as e:\n print('Exception occured for:'+str(e))\n try:\n print('\\tbest_pred_score:',self.models_dict[key]['best_pred_score'])\n except Exception as e:\n print('Exception occured for:'+str(e))\n\n for metric_key in self.metrics.keys():\n if self.metrics[metric_key] !=None:\n try:\n if 'roc' in metric_key:\n self.models_dict[key][metric_key] = self.metrics[metric_key](y_test, y_pred_proba)\n else:\n self.models_dict[key][metric_key] = self.metrics[metric_key](y_test, y_pred)\n print('\\t',metric_key,':',self.models_dict[key][metric_key])\n except Exception as e:\n print('Exception occured for',metric_key,':',str(e))\n\n if 'sklearn' in str(type(self.models_dict[key]['model'])):\n self.save(self.models_dict[key], 'model_dict', 'dill', path_model_dir)\n elif 'Net' in key:\n model_dict_subset = self.models_dict[key].copy()\n for key in self.models_dict[key].keys():\n if key not in ['y_test','y_pred','best_pred_score'] +list(self.metrics.keys()):\n model_dict_subset.pop(key)",
"def train():\n\n ### DO NOT CHANGE SEEDS!\n # Set the random seeds for reproducibility\n np.random.seed(42)\n\n ## Prepare all functions\n # Get number of units in each hidden layer specified in the string such as 100,100\n if FLAGS.dnn_hidden_units:\n dnn_hidden_units = FLAGS.dnn_hidden_units.split(\",\")\n dnn_hidden_units = [int(dnn_hidden_unit_) for dnn_hidden_unit_ in dnn_hidden_units]\n else:\n dnn_hidden_units = []\n\n # Get negative slope parameter for LeakyReLU\n neg_slope = FLAGS.neg_slope\n\n ########################\n # PUT YOUR CODE HERE #\n #######################\n import matplotlib.pyplot as plt\n\n data = cifar10_utils.get_cifar10(FLAGS.data_dir)\n train = data['train']\n test = data['test']\n dim_x = train.images.shape[1]*train.images.shape[2]*train.images.shape[3]\n\n mlp = MLP(dim_x, dnn_hidden_units, train.labels.shape[1], neg_slope)\n loss_module = CrossEntropyModule()\n\n loss_train = np.zeros((int(np.floor(FLAGS.max_steps/FLAGS.eval_freq), )))\n loss_test = np.zeros((int(np.floor(FLAGS.max_steps/FLAGS.eval_freq), )))\n accuracy_test = np.zeros((int(np.floor(FLAGS.max_steps/FLAGS.eval_freq), )))\n\n images_test = test.images\n labels_test = test.labels\n images_test = np.reshape(images_test, (images_test.shape[0], dim_x))\n\n for i in range(0, FLAGS.max_steps):\n if PRINTS:\n print('iter', i+1, end='\\r')\n images, labels = train.next_batch(FLAGS.batch_size) \n images = np.reshape(images, (images.shape[0], dim_x))\n\n pred = mlp.forward(images)\n loss = loss_module.forward(pred, labels)\n loss_grad = loss_module.backward(pred, labels)\n mlp.backward(loss_grad)\n\n for module in reversed(mlp.modules):\n if isinstance(module, LinearModule):\n module.params['weight'] -= 1/FLAGS.batch_size*FLAGS.learning_rate*module.grads['weight']\n module.params['bias'] -= 1/FLAGS.batch_size*FLAGS.learning_rate*module.grads['bias']\n if (i+1) % FLAGS.eval_freq == 0:\n pred_test = mlp.forward(images_test)\n loss_train[i // FLAGS.eval_freq] = loss\n accuracy_test[i // FLAGS.eval_freq] = accuracy(pred_test, labels_test)\n loss_test[i // FLAGS.eval_freq] = loss_module.forward(pred_test, labels_test)\n if PRINTS:\n print()\n print('test_loss:', loss_test[i // FLAGS.eval_freq])\n print('test_accuracy:', accuracy_test[i // FLAGS.eval_freq])\n print('train_loss:', loss_train[i // FLAGS.eval_freq])\n\n if PLOTS:\n fig, ax = plt.subplots(1, 2, figsize=(10,5))\n fig.suptitle('Training curves for Numpy MLP\\nFinal test accuracy: {:0.4f}, default configuration'.format(accuracy_test[i // FLAGS.eval_freq]))\n\n ax[0].set_title('Loss')\n ax[0].set_ylabel('Loss value')\n ax[0].set_xlabel('No of batches seen x{}'.format(FLAGS.eval_freq))\n ax[0].plot(loss_train, label='Train')\n ax[0].plot(loss_test, label='Test')\n ax[0].legend()\n\n ax[1].set_title('Accuracy')\n ax[1].set_ylabel('Accuracy value')\n ax[1].set_xlabel('No of batches seen x{}'.format(FLAGS.eval_freq))\n ax[1].plot(accuracy_test, label='Test')\n ax[1].legend()\n plt.show()\n\n\n ########################\n # END OF YOUR CODE #\n #######################",
"def simple_XGBoost_model(X_XGB, Y_XGB, X_XGB_test, modeltype, log_y=False, GPU_flag=False,\r\n scaler = '', enc_method='label',verbose=0):\r\n columns = X_XGB.columns\r\n if isinstance(scaler, str):\r\n if not scaler == '':\r\n scaler = scaler.lower()\r\n if scaler == 'standard':\r\n scaler = StandardScaler()\r\n elif scaler == 'minmax':\r\n scaler = MinMaxScaler()\r\n else:\r\n scaler = StandardScaler()\r\n ######### G P U P R O C E S S I N G B E G I N S ############\r\n ###### This is where we set the CPU and GPU parameters for XGBoost\r\n if GPU_flag:\r\n GPU_exists = check_if_GPU_exists()\r\n else:\r\n GPU_exists = False\r\n ##### Set the Scoring Parameters here based on each model and preferences of user ###\r\n cpu_params = {}\r\n param = {}\r\n cpu_params['tree_method'] = 'hist'\r\n cpu_params['gpu_id'] = 0\r\n cpu_params['updater'] = 'grow_colmaker'\r\n cpu_params['predictor'] = 'cpu_predictor'\r\n if GPU_exists:\r\n param['tree_method'] = 'gpu_hist'\r\n param['gpu_id'] = 0\r\n param['updater'] = 'grow_gpu_hist' #'prune'\r\n param['predictor'] = 'gpu_predictor'\r\n print(' Running XGBoost using GPU parameters')\r\n else:\r\n param = copy.deepcopy(cpu_params)\r\n print(' Running XGBoost using CPU parameters')\r\n #################################################################################\r\n if modeltype == 'Regression':\r\n if log_y:\r\n Y_XGB.loc[Y_XGB==0] = 1e-15 ### just set something that is zero to a very small number\r\n xgb = XGBRegressor(\r\n booster = 'gbtree',\r\n colsample_bytree=0.5,\r\n alpha=0.015,\r\n gamma=4,\r\n learning_rate=0.1,\r\n max_depth=15,\r\n min_child_weight=2,\r\n n_estimators=1000,\r\n reg_lambda=0.5,\r\n \t #reg_alpha=8,\r\n subsample=0.7,\r\n random_state=99,\r\n objective='reg:squarederror',\r\n \t eval_metric='rmse',\r\n verbosity = 0,\r\n n_jobs=-1,\r\n silent = True)\r\n else:\r\n if Y_XGB.nunique() <= 2:\r\n objective='binary:logistic'\r\n eval_metric = 'logloss'\r\n else:\r\n objective='multi:softmax'\r\n eval_metric = 'mlogloss'\r\n xgb = XGBClassifier(\r\n booster = 'gbtree',\r\n colsample_bytree=0.5,\r\n alpha=0.015,\r\n gamma=4,\r\n learning_rate=0.1,\r\n max_depth=15,\r\n min_child_weight=2,\r\n n_estimators=1000,\r\n reg_lambda=0.5,\r\n objective=objective,\r\n subsample=0.7,\r\n random_state=99,\r\n n_jobs=-1,\r\n verbosity = 0,\r\n silent = True)\r\n\r\n #testing for GPU\r\n model = xgb.set_params(**param)\r\n if X_XGB.shape[0] >= 1000000:\r\n hyper_frac = 0.1\r\n elif X_XGB.shape[0] >= 100000:\r\n hyper_frac = 0.2\r\n elif X_XGB.shape[0] >= 10000:\r\n hyper_frac = 0.3\r\n else:\r\n hyper_frac = 0.4\r\n #### now select a random sample from X_XGB ##\r\n if modeltype == 'Regression':\r\n X_XGB_sample = X_XGB[:int(hyper_frac*X_XGB.shape[0])]\r\n Y_XGB_sample = Y_XGB[:int(hyper_frac*X_XGB.shape[0])]\r\n else:\r\n X_XGB_sample = X_XGB.sample(frac=hyper_frac, random_state=99)\r\n Y_XGB_sample = Y_XGB.sample(frac=hyper_frac, random_state=99)\r\n ######### Now set the number of rows we need to tune hyper params ###\r\n nums = int(X_XGB_sample.shape[0]*0.9)\r\n X_train = X_XGB_sample[:nums]\r\n X_valid = X_XGB_sample[nums:]\r\n Y_train = Y_XGB_sample[:nums]\r\n Y_valid = Y_XGB_sample[nums:]\r\n scoreFunction = { \"precision\": \"precision_weighted\",\"recall\": \"recall_weighted\"}\r\n params = {\r\n 'learning_rate': sp.stats.uniform(scale=1),\r\n 'gamma': sp.stats.randint(0, 32),\r\n 'n_estimators': sp.stats.randint(100,500),\r\n \"max_depth\": sp.stats.randint(3, 15),\r\n },\r\n model = RandomizedSearchCV(xgb.set_params(**param),\r\n 
param_distributions = params,\r\n n_iter = 10,\r\n return_train_score = True,\r\n random_state = 99,\r\n n_jobs=-1,\r\n #cv = 3,\r\n verbose = False)\r\n\r\n X_train, X_valid = data_transform(X_train, Y_train, X_valid,\r\n scaler=scaler, enc_method=enc_method)\r\n\r\n gbm_model = xgb_model_fit(model, X_train, Y_train, X_valid, Y_valid, modeltype,\r\n log_y, params, cpu_params)\r\n model = gbm_model.best_estimator_\r\n #############################################################################\r\n n_splits = 10\r\n ls=[]\r\n if modeltype == 'Regression':\r\n fold = KFold(n_splits=n_splits)\r\n else:\r\n fold = StratifiedKFold(shuffle=True, n_splits=n_splits, random_state=99)\r\n scores=[]\r\n if not isinstance(X_XGB_test, str):\r\n pred_xgbs = np.zeros(len(X_XGB_test))\r\n pred_probas = np.zeros(len(X_XGB_test))\r\n else:\r\n pred_xgbs = []\r\n pred_probas = []\r\n #### First convert test data into numeric using train data ###\r\n if not isinstance(X_XGB_test, str):\r\n _, X_XGB_test_enc = data_transform(X_XGB, Y_XGB, X_XGB_test,\r\n scaler=scaler, enc_method=enc_method)\r\n #### now run all the folds each one by one ##################################\r\n start_time = time.time()\r\n for folds, (train_index, test_index) in tqdm(enumerate(fold.split(X_XGB,Y_XGB))):\r\n x_train, x_test = X_XGB.iloc[train_index], X_XGB.iloc[test_index]\r\n if modeltype == 'Regression':\r\n if log_y:\r\n y_train, y_test = np.log(Y_XGB.iloc[train_index]), Y_XGB.iloc[test_index]\r\n else:\r\n y_train, y_test = Y_XGB.iloc[train_index], Y_XGB.iloc[test_index]\r\n else:\r\n y_train, y_test = Y_XGB.iloc[train_index], Y_XGB.iloc[test_index]\r\n\r\n ## scale the x_train and x_test values - use all columns -\r\n x_train, x_test = data_transform(x_train, y_train, x_test,\r\n scaler=scaler, enc_method=enc_method)\r\n\r\n model = gbm_model.best_estimator_\r\n model = xgb_model_fit(model, x_train, y_train, x_test, y_test, modeltype,\r\n log_y, params, cpu_params)\r\n\r\n #### now make predictions on validation data ##\r\n if modeltype == 'Regression':\r\n if log_y:\r\n preds = np.exp(model.predict(x_test))\r\n else:\r\n preds = model.predict(x_test)\r\n else:\r\n preds = model.predict(x_test)\r\n\r\n feature_importances = pd.DataFrame(model.feature_importances_,\r\n index = X_XGB.columns,\r\n columns=['importance'])\r\n sum_all=feature_importances.values\r\n ls.append(sum_all)\r\n ###### Time to consolidate the predictions on test data #########\r\n if modeltype == 'Regression':\r\n if not isinstance(X_XGB_test, str):\r\n if log_y:\r\n pred_xgb=np.exp(model.predict(X_XGB_test_enc[columns]))\r\n else:\r\n pred_xgb=model.predict(X_XGB_test_enc[columns])\r\n pred_xgbs = np.vstack([pred_xgbs, pred_xgb])\r\n pred_xgbs = pred_xgbs.mean(axis=0)\r\n if log_y:\r\n score = np.sqrt(mean_squared_log_error(y_test, preds))\r\n else:\r\n score = np.sqrt(mean_squared_error(y_test, preds))\r\n print('RMSE score in fold %d = %s' %(folds+1, score))\r\n else:\r\n if not isinstance(X_XGB_test, str):\r\n pred_xgb=model.predict(X_XGB_test_enc[columns])\r\n pred_proba = model.predict_proba(X_XGB_test_enc[columns])\r\n if folds == 0:\r\n pred_xgbs = copy.deepcopy(pred_xgb)\r\n pred_probas = copy.deepcopy(pred_proba)\r\n else:\r\n pred_xgbs = np.vstack([pred_xgbs, pred_xgb])\r\n pred_xgbs = stats.mode(pred_xgbs, axis=0)[0][0]\r\n pred_probas = np.mean( np.array([ pred_probas, pred_proba ]), axis=0 )\r\n score = balanced_accuracy_score(y_test, preds)\r\n print('Balanced Accuracy score in fold %d = %0.1f%%' %(folds+1, score*100))\r\n 
scores.append(score)\r\n print(' Time taken for training XGB (in minutes) = %0.1f' %(\r\n (time.time()-start_time)/60))\r\n if verbose:\r\n plot_importances_XGB(train_set=X_XGB, labels=Y_XGB, ls=ls, y_preds=pred_xgbs,\r\n modeltype=modeltype)\r\n print(\"Average scores are: \", np.sum(scores)/len(scores))\r\n print('\\nReturning the following:')\r\n print(' Model = %s' %model)\r\n if modeltype == 'Regression':\r\n print(' final predictions', pred_xgbs[:10])\r\n return (pred_xgbs, model)\r\n else:\r\n print(' final predictions', pred_xgbs[:10])\r\n print(' predicted probabilities', pred_probas[:1])\r\n return (pred_xgbs, pred_probas, model)",
"def eval_model(args):\n cfg, lbl = util.get_label_cfg_by_args(args)\n uid = cfg['uniqueid']\n print('We are playing with %s' % uid)\n outdir='models/%s/gate_expert' % uid\n outname='gate_expert_model.pt'\n if KLLOSS:\n outname = 'gate_expert_kldiv_model.pt'\n if args.warm:\n outname = outname.replace('.pt', '_warm.pt')\n mdl_path = os.path.join(outdir, outname)\n gate_expert = GateExpertNet(mdl_path, args.argmax)\n eval_fun = gate_expert.get_y\n\n data = npload(cfg['file_path'], uid)\n datax = data[cfg['x_name']]\n datay = data[cfg['y_name']]\n evaly = eval_fun(datax)\n print(np.histogram(evaly[:, 48]))\n fig, ax = pld.get3dAxis()\n ax.scatter(datax[:, 0], datax[:, 1], evaly[:, 48])\n loss = l1loss(evaly, datay)\n err_norm = np.mean(loss, axis=1)\n fig, ax = plt.subplots()\n ax.hist(err_norm)\n plt.show()",
"def create_XGBoost_model():\n model = sklearn.ensemble.GradientBoostingRegressor(n_estimators=300, learning_rate=0.05)\n return sklearn.multioutput.RegressorChain(model)"
] | [
"0.67224866",
"0.6687999",
"0.653288",
"0.65224236",
"0.6393243",
"0.63662755",
"0.6364721",
"0.6355219",
"0.6346097",
"0.63401467",
"0.6327798",
"0.63157797",
"0.6307998",
"0.6305631",
"0.6280537",
"0.6278629",
"0.6274313",
"0.6273152",
"0.62397677",
"0.622105",
"0.62110114",
"0.6202107",
"0.6202092",
"0.6191887",
"0.619059",
"0.6179162",
"0.61752194",
"0.6160608",
"0.6159777",
"0.6153603",
"0.61416745",
"0.6118388",
"0.61140156",
"0.6090141",
"0.6085019",
"0.6081151",
"0.60794723",
"0.60716385",
"0.60651785",
"0.60526794",
"0.60476166",
"0.6035133",
"0.60329765",
"0.602998",
"0.6028637",
"0.6023481",
"0.6022663",
"0.6013734",
"0.60125995",
"0.60099155",
"0.60017085",
"0.6001043",
"0.5995912",
"0.5990119",
"0.59895563",
"0.5989025",
"0.59883875",
"0.59849286",
"0.59842694",
"0.5982271",
"0.5981562",
"0.5978732",
"0.59786886",
"0.59742224",
"0.597314",
"0.5965474",
"0.59632635",
"0.59607273",
"0.59527147",
"0.5949185",
"0.5948142",
"0.5948141",
"0.59481215",
"0.5943682",
"0.59428006",
"0.5933886",
"0.5930593",
"0.5928285",
"0.5928067",
"0.592552",
"0.592552",
"0.592552",
"0.592552",
"0.592552",
"0.59248984",
"0.59245265",
"0.5922126",
"0.59210753",
"0.5915958",
"0.59157926",
"0.59127426",
"0.59123474",
"0.59111166",
"0.5909133",
"0.5906001",
"0.5902122",
"0.5900891",
"0.5898565",
"0.5898478",
"0.58973044",
"0.589377"
] | 0.0 | -1 |