| column | type | size |
|---|---|---|
| query | string | 9 – 9.05k chars |
| document | string | 10 – 222k chars |
| metadata | dict | – |
| negatives | sequence | length 30 |
| negative_scores | sequence | length 30 |
| document_score | string | 4 – 10 chars |
| document_rank | string | 2 classes |

Records below are laid out as `query | document | metadata | negatives | negative_scores | document_score | document_rank`.
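Each record pairs a query (a flattened test name) with its positive document (the matching test body), 30 mined hard negatives with their retrieval scores, and a `metadata.objective` block marking which fields form the `(query, document, negatives)` triplet. A minimal sketch of turning one record into training triplets, assuming the string-typed scores shown in the records and an illustrative false-negative filter (the `build_triplets` helper and the 0.95 margin are our assumptions, not part of the dataset):

```python
def build_triplets(record, margin=0.95):
    """Yield (query, positive, negative) triplets from one record,
    dropping negatives whose retrieval score is so close to the
    positive's document_score that they are likely false negatives."""
    positive_score = float(record["document_score"])
    for negative, score in zip(record["negatives"], record["negative_scores"]):
        # Scores are stored as strings in this dump; compare numerically.
        if float(score) >= margin * positive_score:
            continue  # near-duplicate of the positive -- skip it
        yield record["query"], record["document"], negative


# Abbreviated example using values from the first record below.
record = {
    "query": "test get all accessible by hash as superuser returns global template",
    "document": "def test_get_all_accessible_by_hash_as_superuser_returns_global_template(self): ...",
    "negatives": ["def test_a(self): ...", "def test_b(self): ...", "def test_c(self): ..."],
    "negative_scores": ["0.7841484", "0.73823154", "0.6534793"],
    "document_score": "0.80500853",
}
print(list(build_triplets(record)))  # keeps the two negatives scored below ~0.765
```

`document_rank` takes one of two values in this dump (0 or 1); reading it as a relevance tier for the positive document is a guess from the preview alone.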
test get all accessible by hash as superuser returns global template | def test_get_all_accessible_by_hash_as_superuser_returns_global_template(
self,
):
mock_request = create_mock_request(user=self.superuser1)
templates = template_api.get_all_accessible_by_hash(
self.fixture.global_template.hash, request=mock_request
)
self.assertTrue(self.fixture.user1_template not in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_retrieve_template_registration(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))"
] | [
"0.7841484",
"0.779551",
"0.7761922",
"0.76789105",
"0.7675325",
"0.75503117",
"0.75284576",
"0.75161767",
"0.7387159",
"0.73823154",
"0.73564553",
"0.73227084",
"0.72829455",
"0.716237",
"0.7129225",
"0.7032054",
"0.69710624",
"0.6944363",
"0.68697715",
"0.68558455",
"0.6848517",
"0.6843913",
"0.68274176",
"0.68216294",
"0.6807154",
"0.67582434",
"0.6734726",
"0.66071844",
"0.65861374",
"0.6534793"
] | 0.80500853 | 0 |
test get all accessible by hash list as anonymous with access right does not return user template | def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.user1_template.hash], request=mock_request
)
self.assertTrue(templates.count() == 0) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_id_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))"
] | [
"0.82188314",
"0.81064343",
"0.80074686",
"0.7999916",
"0.7974573",
"0.794547",
"0.7922802",
"0.7900087",
"0.7856008",
"0.7839907",
"0.7780824",
"0.77454853",
"0.7712532",
"0.76505816",
"0.76493615",
"0.76412743",
"0.75517875",
"0.7539482",
"0.7521282",
"0.7480579",
"0.7430734",
"0.7401914",
"0.7347694",
"0.72538054",
"0.7197252",
"0.7074336",
"0.7034771",
"0.7025603",
"0.69682807",
"0.68926406"
] | 0.81870437 | 1 |
test get all accessible by hash list as anonymous does not return global | def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.global_template.hash], request=mock_request
)
self.assertTrue(templates.count() == 0) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def keysAll():",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def __call__(self):\n return self._main._keys()",
"def test_get_list(self):\n pass",
"def test_get_token_supply_all_using_get(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def keys():",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_keys(self):\n response = self.client.get_keys()\n assert isinstance(response, dict)\n assert 'public' in response\n assert 'private' in response\n assert response['public'] is not None\n assert response['private'] is not None",
"def __call__(self):\n return self._main._items()",
"def test_get_direct_access_list(self):\n result = self.param_dict.get_direct_access_list()\n self.assertTrue(isinstance(result, list))\n self.assertEquals(len(result), 2)\n self.assert_(\"foo\" in result)\n self.assert_(\"baz\" in result)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all(redis):\n store.store(1, 'some', data={'foo': 'bar'})\n store.store(1, 'other', triggering=True)\n store.store(2, 'irrelevant')\n\n res = list(store.get_all(1))\n\n assert res == [\n {'name': 'some', 'triggering': '0', 'foo': 'bar'},\n {'name': 'other', 'triggering': '1'},\n ]\n\n # does not clear by default\n assert list(store.get_all(1)) == res\n assert store.pending_profile_ids() == set(['1'])",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_variables_get(self):\n pass"
] | [
"0.72003764",
"0.6906085",
"0.682766",
"0.6713539",
"0.6642761",
"0.6638915",
"0.6523141",
"0.6474281",
"0.6390187",
"0.6310114",
"0.6268782",
"0.6232631",
"0.6226728",
"0.6178704",
"0.6056042",
"0.60132897",
"0.6012866",
"0.5999089",
"0.5983888",
"0.59816855",
"0.5979013",
"0.59730625",
"0.59610456",
"0.59155095",
"0.5904536",
"0.5873604",
"0.58493567",
"0.58025855",
"0.57963026",
"0.57943094"
] | 0.70168906 | 1 |
test get all accessible by hash list as anonymous with access right returns global | def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.global_template.hash], request=mock_request
)
self.assertTrue(templates.count() == 1)
self.assertTrue((template.user is None for template in templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_direct_access_list(self):\n result = self.param_dict.get_direct_access_list()\n self.assertTrue(isinstance(result, list))\n self.assertEquals(len(result), 2)\n self.assert_(\"foo\" in result)\n self.assert_(\"baz\" in result)",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_id_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_token_supply_all_using_get(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_list(self):\n pass",
"def test_get_keys(self):\n response = self.client.get_keys()\n assert isinstance(response, dict)\n assert 'public' in response\n assert 'private' in response\n assert response['public'] is not None\n assert response['private'] is not None",
"def keysAll():",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def call(self) -> List[Dict]:",
"def access():",
"def get_all_access():\n\t# Get the email from the user making the request\n\temail = get_jwt_identity()\n\treturn get_all_access_helper(email)"
] | [
"0.72204554",
"0.6992188",
"0.69133",
"0.67211556",
"0.6717264",
"0.66367173",
"0.6608304",
"0.6528222",
"0.6494193",
"0.6466781",
"0.63778913",
"0.634835",
"0.6266997",
"0.6256954",
"0.6207588",
"0.6187965",
"0.6110008",
"0.6073582",
"0.6053851",
"0.60277724",
"0.5977818",
"0.59763354",
"0.5968159",
"0.5925161",
"0.59169567",
"0.58995014",
"0.5882636",
"0.58321494",
"0.58244646",
"0.58234185"
] | 0.7492252 | 0 |
test get all accessible by hash list as user returns user template | def test_get_all_accessible_by_hash_list_as_user_returns_user_template(
self,
):
mock_request = create_mock_request(user=self.user1)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.user1_template.hash], request=mock_request
)
self.assertTrue(self.fixture.user1_template in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template not in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_list_user(self):\n pass",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def get_all_users():",
"def test_list(self):\n self.userbase('create', 'alice', 'localhost', SECRET)\n self.userbase('create', 'bob', 'localhost', SECRET)\n output = self.userbase('list')\n self.assertEqual(output, ['alice@localhost', 'bob@localhost'])",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))"
] | [
"0.8133613",
"0.8025096",
"0.79555947",
"0.7924744",
"0.77753305",
"0.77026737",
"0.75754946",
"0.7567176",
"0.7559033",
"0.75048995",
"0.7476242",
"0.7378073",
"0.73105985",
"0.73017",
"0.72951525",
"0.72721916",
"0.7231697",
"0.7122126",
"0.70607495",
"0.7040041",
"0.70203257",
"0.6999441",
"0.6959033",
"0.68824726",
"0.6852842",
"0.67096406",
"0.66587156",
"0.6633054",
"0.6633035",
"0.6587904"
] | 0.8334091 | 0 |
test get all accessible by hash list as user returns global template | def test_get_all_accessible_by_hash_list_as_user_returns_global_template(
self,
):
mock_request = create_mock_request(user=self.user1)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.global_template.hash], request=mock_request
)
self.assertTrue(self.fixture.user1_template not in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def T(request):\n\treturn all_templates[request.param]",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))"
] | [
"0.7817108",
"0.7674833",
"0.76448846",
"0.7635715",
"0.76171523",
"0.7548915",
"0.7494546",
"0.74614096",
"0.7423985",
"0.7415341",
"0.7338782",
"0.7198786",
"0.7194994",
"0.7095257",
"0.7054384",
"0.7039704",
"0.70117605",
"0.6924233",
"0.69152415",
"0.68132144",
"0.6760775",
"0.66984826",
"0.6687999",
"0.66858375",
"0.6659667",
"0.6612261",
"0.6553612",
"0.65432787",
"0.6419558",
"0.63154906"
] | 0.792674 | 0 |
test get all accessible by hash list as staff returns user template | def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(
self,
):
mock_request = create_mock_request(user=self.staff_user1)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.user1_template.hash], request=mock_request
)
self.assertTrue(self.fixture.user1_template in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template not in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_list_user(self):\n pass",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_id_list_as_staff_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)"
] | [
"0.81351954",
"0.8133138",
"0.8092263",
"0.78858453",
"0.7810828",
"0.7779971",
"0.7641144",
"0.7626335",
"0.7617192",
"0.76108444",
"0.74905986",
"0.74905354",
"0.747804",
"0.7434959",
"0.7407873",
"0.732234",
"0.7317942",
"0.7250191",
"0.71803576",
"0.7138569",
"0.70978194",
"0.70512795",
"0.7007727",
"0.69915295",
"0.6974713",
"0.69704574",
"0.68621063",
"0.685786",
"0.68483067",
"0.6753825"
] | 0.83508813 | 0 |
test get all accessible by hash list as staff returns global template | def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(
self,
):
mock_request = create_mock_request(user=self.staff_user1)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.global_template.hash], request=mock_request
)
self.assertTrue(self.fixture.user1_template not in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_staff_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))"
] | [
"0.7654985",
"0.7619861",
"0.7532686",
"0.75176096",
"0.7428983",
"0.7404091",
"0.7361139",
"0.7338509",
"0.7328727",
"0.7255129",
"0.72190326",
"0.71115243",
"0.69764596",
"0.69752246",
"0.6954954",
"0.69451195",
"0.6886439",
"0.68627393",
"0.6745489",
"0.67304397",
"0.66975796",
"0.6602104",
"0.65133274",
"0.64860564",
"0.64737004",
"0.6467699",
"0.64205253",
"0.6341731",
"0.6337809",
"0.62817633"
] | 0.78305656 | 0 |
test get all accessible by hash list as superuser returns user template | def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(
self,
):
mock_request = create_mock_request(user=self.superuser1)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.user1_template.hash], request=mock_request
)
self.assertTrue(self.fixture.user1_template in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template not in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_list_user(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_list(self):\n self.userbase('create', 'alice', 'localhost', SECRET)\n self.userbase('create', 'bob', 'localhost', SECRET)\n output = self.userbase('list')\n self.assertEqual(output, ['alice@localhost', 'bob@localhost'])",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def get_all_users():"
] | [
"0.8227565",
"0.8194535",
"0.8007385",
"0.7911953",
"0.78929853",
"0.78840536",
"0.7775287",
"0.7738975",
"0.7630923",
"0.76105887",
"0.758177",
"0.75368434",
"0.7483725",
"0.7452379",
"0.7292541",
"0.72641546",
"0.7235065",
"0.7176262",
"0.71578574",
"0.71336734",
"0.71314454",
"0.70730203",
"0.7049349",
"0.7005631",
"0.69557524",
"0.6955012",
"0.68792886",
"0.6877479",
"0.67326206",
"0.67117065"
] | 0.84037524 | 0 |
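The records above and below all exercise one access-control contract for `template_api`. A minimal sketch of that contract follows, under stated assumptions: `Template`, `ALL_TEMPLATES`, and `ANONYMOUS_ACCESS_ENABLED` are hypothetical stubs, and only `template_api`, `AccessControlError`, and the behaviors asserted in the tests come from the records themselves.

```python
from dataclasses import dataclass
from typing import List, Optional


class AccessControlError(Exception):
    """Raised when an operation is not permitted for the requesting user."""


@dataclass
class Template:
    user: Optional[str]  # owner id as a string, or None for a global template
    hash: str


ALL_TEMPLATES: List[Template] = []  # hypothetical stand-in for the template store
ANONYMOUS_ACCESS_ENABLED = False    # hypothetical stand-in for the public-access setting


def get_all_accessible_by_hash(template_hash: str, request) -> List[Template]:
    """Return the templates with this hash that request.user may see."""
    user = request.user
    candidates = [t for t in ALL_TEMPLATES if t.hash == template_hash]
    if user.is_superuser:
        return candidates  # superusers see every matching template
    if user.is_anonymous:
        # Anonymous callers see only global templates, and only when the
        # access right is enabled; otherwise the result is empty.
        return [t for t in candidates if t.user is None] if ANONYMOUS_ACCESS_ENABLED else []
    # Regular and staff users see their own templates plus global ones.
    return [t for t in candidates if t.user == str(user.id) or t.user is None]
```

Note that staff users get no extra read access here, which matches the `does_not_return_other_user_template` records.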
test get all accessible by hash list as superuser returns global template | def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(
self,
):
mock_request = create_mock_request(user=self.superuser1)
templates = template_api.get_all_accessible_by_hash_list(
[self.fixture.global_template.hash], request=mock_request
)
self.assertTrue(self.fixture.user1_template not in list(templates))
self.assertTrue(self.fixture.user2_template not in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_anonymous_does_not_return_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))"
] | [
"0.79852265",
"0.79441637",
"0.79198414",
"0.7865991",
"0.77729464",
"0.77234817",
"0.7679411",
"0.76214916",
"0.7617738",
"0.7583023",
"0.754689",
"0.7470992",
"0.7443684",
"0.72886294",
"0.7275211",
"0.72666967",
"0.7181501",
"0.7174229",
"0.7105018",
"0.6970292",
"0.69422275",
"0.6914545",
"0.6905688",
"0.6839105",
"0.6828524",
"0.6789579",
"0.67873406",
"0.6693985",
"0.6641485",
"0.6615645"
] | 0.81317466 | 0 |
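The `hash_list` variants asserted above behave like a union of single-hash lookups. A sketch reusing the stubs from the previous example (a real implementation would more likely issue one filtered query instead of a loop):

```python
def get_all_accessible_by_hash_list(hash_list: List[str], request) -> List[Template]:
    """Apply the single-hash visibility rules to each hash in the list."""
    results: List[Template] = []
    for template_hash in hash_list:
        results.extend(get_all_accessible_by_hash(template_hash, request))
    return results
```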
test get all as anonymous with access right returns global templates | def test_get_all_as_anonymous_with_access_right_returns_global_templates(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
templates = template_api.get_all(request=mock_request)
self.assertEqual(templates.count(), 1)
self.assertTrue(all(template.user is None for template in templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def T(request):\n\treturn all_templates[request.param]",
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_list_template_for_all_namespaces(self):\n pass",
"def test_retrieve_template_registration(self):\n pass",
"def test_get_templates_in_virtualization_realm(self):\n pass",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_activity_templates(self):\n pass",
"def test_get_global_template_as_anonymous_with_access_right_returns_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n template = template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.global_template)",
"def test_create_template_for_all_namespaces(self):\n pass",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_template(self):\n\t\tself.assertTemplateUsed(self.resp, 'inicio.html')",
"def test_templater(self):\n\n # Set a global templater for all items\n self.site.template(r\"(.*)\", lambda item: \"ALL\")\n # Set another templater on the index item\n self.site.template(r\"index.html\", lambda item: \"INDEX\")\n\n # Since an item can only have one templater, the index templater should have been overwritten\n self.assertEqual(\"INDEX\", self.site.items[\"index.html\"].templated)\n self.assertEqual(\"ALL\", self.site.items[\"test/test.html\"].templated)",
"def test_get_all_as_anonymous_returns_empty_list(self):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 0)",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_register_template(self):\n pass",
"def test_get_subscription_templates(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_render_all_templates():\n assert templates.xhook__initialize({})\n assert templates.xhook__handlers({})\n assert templates.xhook__enable()\n assert templates.xhook__release()\n assert templates.asserts__call_count({})\n assert templates.asserts__calls({})",
"def test_get_all_accessible_by_id_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_template(self):\n\t\tself.assertTemplateUsed(self.resp, 'cadastro.html')",
"def test_get_any_template_as_superuser_returns_template(self):\n mock_request = create_mock_request(user=self.superuser1)\n template = template_api.get_by_id(\n self.fixture.user1_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user1_template)\n template = template_api.get_by_id(\n self.fixture.user2_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user2_template)\n template = template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.global_template)",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_template_home(self):\n self.assertTemplateUsed(self.response, 'index.html')",
"def test_get_activity_template(self):\n pass",
"def get_all_templates(cls):\n raise NotImplementedError()",
"def test_read_namespaced_template(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_list_template_registrations(self):\n pass"
] | [
"0.71963495",
"0.69589484",
"0.695767",
"0.6738414",
"0.6680882",
"0.6666209",
"0.6632277",
"0.66269696",
"0.6591374",
"0.65500444",
"0.65330005",
"0.64813143",
"0.6450166",
"0.6417387",
"0.6408952",
"0.64055634",
"0.63884926",
"0.6387879",
"0.6307593",
"0.62807924",
"0.62755466",
"0.627257",
"0.6225471",
"0.6217118",
"0.6216239",
"0.62011415",
"0.6200841",
"0.61885214",
"0.6183105",
"0.6177953"
] | 0.7641047 | 0 |
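Every test builds its request through `create_mock_request(user=...)`. The helper's real definition is not shown in these records; a plausible minimal shape, assuming the code under test only ever reads `request.user`:

```python
from unittest.mock import Mock


def create_mock_request(user):
    """Stand-in HTTP request carrying only the attribute the API reads."""
    request = Mock()
    request.user = user
    return request
```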
test get all as user returns accessible templates | def test_get_all_as_user_returns_accessible_templates(self):
mock_request = create_mock_request(user=self.user)
templates = template_api.get_all(request=mock_request)
self.assertEqual(templates.count(), 2)
self.assertTrue(self.fixture.user1_template in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_any_template_as_superuser_returns_template(self):\n mock_request = create_mock_request(user=self.superuser1)\n template = template_api.get_by_id(\n self.fixture.user1_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user1_template)\n template = template_api.get_by_id(\n self.fixture.user2_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user2_template)\n template = template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.global_template)",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_global_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_templates_in_virtualization_realm(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_id_list_as_staff_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_activity_templates(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_anonymous_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def T(request):\n\treturn all_templates[request.param]",
"def test_get_all_accessible_by_hash_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_user_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_id_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))"
] | [
"0.7973463",
"0.7884634",
"0.75864184",
"0.73863786",
"0.7368473",
"0.73545754",
"0.73322153",
"0.73156446",
"0.7303613",
"0.7301676",
"0.71165544",
"0.70814574",
"0.7071253",
"0.7070465",
"0.7058631",
"0.69952977",
"0.6969601",
"0.6948563",
"0.6939556",
"0.6878665",
"0.68417907",
"0.6841685",
"0.68300533",
"0.6802377",
"0.6784579",
"0.67793286",
"0.6777047",
"0.67730683",
"0.67729974",
"0.67685676"
] | 0.8490938 | 0 |
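`get_all` follows the same per-role visibility as the hash lookups, just without a hash filter: superusers get all three fixture templates, regular and staff users get their own plus the global one, and anonymous users get the global one only when access is enabled. A sketch on the stubs introduced earlier:

```python
def get_all(request) -> List[Template]:
    """Return every template request.user may see."""
    user = request.user
    if user.is_superuser:
        return list(ALL_TEMPLATES)  # everything
    if user.is_anonymous:
        return ([t for t in ALL_TEMPLATES if t.user is None]
                if ANONYMOUS_ACCESS_ENABLED else [])  # global only, if allowed
    # Own templates plus global ones.
    return [t for t in ALL_TEMPLATES if t.user == str(user.id) or t.user is None]
```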
test get all as staff returns accessible templates | def test_get_all_as_staff_returns_accessible_templates(self):
mock_request = create_mock_request(user=self.staff_user)
templates = template_api.get_all(request=mock_request)
self.assertEqual(templates.count(), 2)
self.assertTrue(self.fixture.user2_template in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_staff_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_superuser_returns_all_templates(self):\n mock_request = create_mock_request(user=self.superuser)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 3)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_global_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_templates_in_virtualization_realm(self):\n pass",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_any_template_as_superuser_returns_template(self):\n mock_request = create_mock_request(user=self.superuser1)\n template = template_api.get_by_id(\n self.fixture.user1_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user1_template)\n template = template_api.get_by_id(\n self.fixture.user2_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user2_template)\n template = template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.global_template)",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_activity_templates(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_does_not_return_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user2_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_id_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_team_template_folders_get(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_list_virtualization_realm_templates(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_does_not_return_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 0)",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_team_template_folders_id_templates_get(self):\n pass",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_retrieve_template_registration(self):\n pass",
"def test_get_all_accessible_by_hash_list_as_anonymous_with_access_right_returns_global(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(templates.count() == 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))"
] | [
"0.77078307",
"0.76111203",
"0.7382082",
"0.7332171",
"0.7315884",
"0.73020625",
"0.7169267",
"0.709241",
"0.69971323",
"0.69874364",
"0.69600505",
"0.6950572",
"0.6810104",
"0.6807266",
"0.67712927",
"0.6756902",
"0.6676017",
"0.66577077",
"0.66072726",
"0.65592235",
"0.6539218",
"0.6531178",
"0.65287155",
"0.6526311",
"0.6507077",
"0.6501936",
"0.65014064",
"0.64512753",
"0.64064735",
"0.64058"
] | 0.8453167 | 0 |
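The fixture referenced throughout (`user1_template`, `user2_template`, `global_template`) amounts to one template per user plus a global one. A hypothetical reconstruction — field names beyond `user` and `hash`, the hash function, and the schema contents are all assumptions:

```python
import hashlib


def load_fixture() -> None:
    """Populate the stub store with the three templates the tests expect."""
    def digest(content: str) -> str:
        return hashlib.sha1(content.encode()).hexdigest()

    ALL_TEMPLATES.clear()
    ALL_TEMPLATES.extend([
        Template(user="1", hash=digest("<schema1/>")),  # user1_template
        Template(user="2", hash=digest("<schema2/>")),  # user2_template
        Template(user=None, hash=digest("<schema/>")),  # global_template
    ])
```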
test get all as superuser returns all templates | def test_get_all_as_superuser_returns_all_templates(self):
mock_request = create_mock_request(user=self.superuser)
templates = template_api.get_all(request=mock_request)
self.assertEqual(templates.count(), 3)
self.assertTrue(self.fixture.user1_template in list(templates))
self.assertTrue(self.fixture.user2_template in list(templates))
self.assertTrue(self.fixture.global_template in list(templates)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_all_as_user_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_staff_returns_accessible_templates(self):\n mock_request = create_mock_request(user=self.staff_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 2)\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_any_template_as_superuser_returns_template(self):\n mock_request = create_mock_request(user=self.superuser1)\n template = template_api.get_by_id(\n self.fixture.user1_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user1_template)\n template = template_api.get_by_id(\n self.fixture.user2_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.user2_template)\n template = template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.global_template)",
"def test_get_all_accessible_by_id_list_as_superuser_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_as_anonymous_with_access_right_returns_global_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 1)\n self.assertTrue((template.user is None for template in templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_superuser_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def get_templates(self):\n return self.http_call(\"get\", url=f\"{self.base_url}/templates\").json()",
"def get_all_templates(cls):\n raise NotImplementedError()",
"def test_get_all_accessible_by_hash_as_superuser_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.global_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_super(self):\n team = Team.create(name='foo', captain_id='User_cap',\n program_id=self.program.uid)\n team.put()\n user = User.create(name='super', email='[email protected]',\n user_type='super_admin')\n user.put()\n\n response = self.testapp.get(\n '/api/users',\n headers=self.login_headers(user),\n )\n response_list = json.loads(response.body)\n self.assertEqual(len(response_list), 1)",
"def test_get_all_user(self):\n response = self.client().get(AuthTestCase.admin)\n # assert the response code\n self.assertEqual(response.status_code, 200)",
"def get_all_templates(self):\n url = self.base_url + \"v2/template/\"\n\n resp = requests.get(url=url, headers=self.headers)\n return resp.json(), resp.status_code",
"def test_get_all_accessible_by_id_list_as_user_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_templates_in_virtualization_realm(self):\n pass",
"def test_list_templates_no_args(self):\n rv = TEST_CLIENT.get(\"/templates\")\n result = rv.json()\n\n expected = util.MOCK_TEMPLATE_LIST\n self.assertEqual(result, expected)\n self.assertEqual(rv.status_code, 200)",
"def list_templates(request):\n templates = models.Template.all().order('name')\n return utility.respond(request, 'admin/list_templates', {'templates': templates})",
"def test_get_all_accessible_by_hash_list_as_superuser_returns_other_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.superuser1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user2_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_admin_get_all(self):\n response = self.app.get('/api/v3/users', headers=self.admin_header)\n self.assertEqual(response.status_code, 200)",
"def T(request):\n\treturn all_templates[request.param]",
"def test_get_all_accessible_by_id_list_as_staff_returns_accessible_templates(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_id_list(\n self.template_id_list, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_as_staff_returns_user_template(self):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_list_as_staff_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_xml_template_get_all(self):\n xmlTemplatesExpected = XmlTemplate.objects.all()\n self.assertItemsEqual(XmlTemplate.get_all(), xmlTemplatesExpected)",
"def test_get_all_accessible_by_hash_list_as_user_returns_user_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.user1_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_accessible_by_hash_as_user_returns_user_template(self):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash(\n self.fixture.user1_template.hash, request=mock_request\n )\n self.assertTrue(self.fixture.user1_template in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template not in list(templates))",
"def test_get_all_as_anonymous_returns_empty_list(self):\n mock_request = create_mock_request(user=self.anonymous_user)\n templates = template_api.get_all(request=mock_request)\n self.assertEqual(templates.count(), 0)",
"def test_get_all_accessible_by_hash_list_as_staff_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))",
"def test_get_all_accessible_by_hash_list_as_user_returns_global_template(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n templates = template_api.get_all_accessible_by_hash_list(\n [self.fixture.global_template.hash], request=mock_request\n )\n self.assertTrue(self.fixture.user1_template not in list(templates))\n self.assertTrue(self.fixture.user2_template not in list(templates))\n self.assertTrue(self.fixture.global_template in list(templates))"
] | [
"0.80813885",
"0.7807613",
"0.73119265",
"0.7304258",
"0.7133047",
"0.7106707",
"0.7028065",
"0.70258",
"0.6983149",
"0.69324434",
"0.6922331",
"0.69057953",
"0.687215",
"0.6858583",
"0.683814",
"0.6838046",
"0.6819967",
"0.6750605",
"0.67349994",
"0.6714064",
"0.6704779",
"0.6680838",
"0.6673426",
"0.66708136",
"0.6669188",
"0.66570026",
"0.6637528",
"0.6632529",
"0.66223174",
"0.6616857"
] | 0.84164745 | 0 |
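The record above exercises the hash-lookup entry point: `get_all_accessible_by_hash` narrows whatever set of templates the requester may see down to those matching a given content hash. A minimal sketch of that filtering step, assuming the access-control layer has already produced the visible collection (the real API takes a `request` and performs that filtering itself):

```python
def get_all_accessible_by_hash(template_hash, accessible_templates):
    """Narrow an already access-filtered collection of templates to those
    whose content hash matches. In the API under test the input set comes
    from the access-control layer; here it is just an iterable."""
    return [t for t in accessible_templates if t.hash == template_hash]
```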
test delete user template as anonymous raises access control error | def test_delete_user_template_as_anonymous_raises_access_control_error(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
with self.assertRaises(AccessControlError):
template_api.delete(
self.fixture.user1_template, request=mock_request
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_user_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_global_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_global_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_other_users_template_as_user_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_other_users_template_as_staff_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)",
"def test_delete_activity_template(self):\n pass",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_anonymous_user_delete(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Unauthorized,\r\n getattr(require, 'token').delete,\r\n token)",
"def allowed_topologytemplate_access_delete(user, template):\n try:\n up = user.get_profile()\n except AttributeError:\n return False\n\n return template.owner == user or user.has_perm(\"vnswww.topologytemplete_delete_any\") or (user.has_perm(\"vnswww.topologytemplete_delete_org\") and template.org == up.org)",
"def test_anonymous_user_delete_user_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n user_taskrun = TaskRunFactory.create()\r\n\r\n assert_raises(Unauthorized,\r\n getattr(require, 'taskrun').delete,\r\n user_taskrun)",
"def test_authenticated_user_delete_anonymous_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n anonymous_taskrun = AnonymousTaskRunFactory.create()\r\n\r\n assert_raises(Forbidden,\r\n getattr(require, 'taskrun').delete,\r\n anonymous_taskrun)",
"def test_anonymous_user_delete_anonymous_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n anonymous_taskrun = AnonymousTaskRunFactory.create()\r\n\r\n assert_raises(Unauthorized,\r\n getattr(require, 'taskrun').delete,\r\n anonymous_taskrun)",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def test_delete_namespaced_template(self):\n pass",
"def test_users_username_delete(self):\n pass",
"def test_post_delete_logged_in(self):\n url = reverse('post-detail', kwargs={'pk': self.post.id})\n self.client.force_authenticate(user=self.user)\n response = self.client.delete(url)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_jenkins_user_delete(self):\n ju = JenkinsUser.objects.get(username=\"user_1\")\n self.assertRaises(django.db.models.deletion.ProtectedError, ju.delete)",
"def testDeleteIsAllowed(self):\n UserAPI().create([(u'user', u'secret', u'User', u'[email protected]')])\n namespaces = SecureNamespaceAPI(self.system.users['fluiddb'])\n namespaces.delete([u'user/private'])\n self.users.delete([u'user'])\n self.assertIdentical(None, getUser(u'user'))",
"def test_user_id_delete(self):\n pass",
"def test_delete_unauthenticated(self):\n\n url = reverse('file')\n\n data = {}\n\n response = self.client.delete(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_anonymous_user_delete_blogpost(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n blogpost = BlogpostFactory.create()\r\n\r\n assert_raises(Unauthorized, getattr(require, 'blogpost').delete, blogpost)",
"def test_delete_fail(self):\n self.user_api()\n self.base.metadata.create_all(self.engine)\n people = self.provision_users()\n p = {'id': people[2].id}\n self.delete('user', 403, params=p)",
"def test_delete_users_non_admin(client: FlaskClient) -> None:\n username = create_random_username()\n # Non-admin users are not allowed to make the request\n auth_token = create_auth_token(username)\n response = delete_users(client, auth_token.signed)\n assert_error_response(response, HTTPStatus.FORBIDDEN)"
] | [
"0.8109796",
"0.7890928",
"0.785249",
"0.78486097",
"0.7497058",
"0.7439203",
"0.7439203",
"0.7338107",
"0.73019093",
"0.72081596",
"0.71589935",
"0.7156079",
"0.7141743",
"0.71223885",
"0.70870435",
"0.70845276",
"0.70736223",
"0.70583034",
"0.7051745",
"0.7028054",
"0.69838506",
"0.69745046",
"0.69522595",
"0.6950804",
"0.6925022",
"0.6912706",
"0.6900592",
"0.6873293",
"0.6867148",
"0.6859766"
] | 0.8186267 | 0 |
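Every test in this record builds its request through `create_mock_request(user=...)`. That helper is not shown in this dump; a minimal sketch consistent with how it is called (the `Mock`-based implementation and the bare `user` attribute are assumptions inferred from usage, not the suite's actual code):

```python
from unittest.mock import Mock


def create_mock_request(user):
    """Return a stand-in HTTP request carrying only what the
    access-control checks appear to inspect: the requesting user."""
    mock_request = Mock()
    mock_request.user = user
    return mock_request
```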
test delete user template as anonymous with access right raises access control error | def test_delete_user_template_as_anonymous_with_access_right_raises_access_control_error(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
with self.assertRaises(AccessControlError):
template_api.delete(
self.fixture.user1_template, request=mock_request
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_user_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_global_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_other_users_template_as_user_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_global_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_other_users_template_as_staff_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def allowed_topologytemplate_access_delete(user, template):\n try:\n up = user.get_profile()\n except AttributeError:\n return False\n\n return template.owner == user or user.has_perm(\"vnswww.topologytemplete_delete_any\") or (user.has_perm(\"vnswww.topologytemplete_delete_org\") and template.org == up.org)",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_activity_template(self):\n pass",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_get_user_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.get_by_id(\n self.fixture.user1_template.id, request=mock_request\n )",
"def test_jenkins_user_delete(self):\n ju = JenkinsUser.objects.get(username=\"user_1\")\n self.assertRaises(django.db.models.deletion.ProtectedError, ju.delete)",
"def testDeleteIsAllowed(self):\n UserAPI().create([(u'user', u'secret', u'User', u'[email protected]')])\n namespaces = SecureNamespaceAPI(self.system.users['fluiddb'])\n namespaces.delete([u'user/private'])\n self.users.delete([u'user'])\n self.assertIdentical(None, getUser(u'user'))",
"def test_anonymous_user_delete_user_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n user_taskrun = TaskRunFactory.create()\r\n\r\n assert_raises(Unauthorized,\r\n getattr(require, 'taskrun').delete,\r\n user_taskrun)",
"def testDeleteUserIsDenied(self):\n [(objectID, username)] = UserAPI().create(\n [(u'user', u'secret', u'User', u'[email protected]')])\n self.store.commit()\n with login(u'user', objectID, self.transact) as session:\n deferred = self.facade.deleteUser(session, u'doomed')\n error = yield self.assertFailure(deferred, TPathPermissionDenied)\n self.assertEqual(u'doomed', error.path)",
"def test_delete_namespaced_template(self):\n pass",
"def test_post_delete_logged_in(self):\n url = reverse('post-detail', kwargs={'pk': self.post.id})\n self.client.force_authenticate(user=self.user)\n response = self.client.delete(url)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_anonymous_user_delete(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Unauthorized,\r\n getattr(require, 'token').delete,\r\n token)",
"def test_delete_users_non_admin(client: FlaskClient) -> None:\n username = create_random_username()\n # Non-admin users are not allowed to make the request\n auth_token = create_auth_token(username)\n response = delete_users(client, auth_token.signed)\n assert_error_response(response, HTTPStatus.FORBIDDEN)",
"def test_authenticated_user_delete_anonymous_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n anonymous_taskrun = AnonymousTaskRunFactory.create()\r\n\r\n assert_raises(Forbidden,\r\n getattr(require, 'taskrun').delete,\r\n anonymous_taskrun)",
"def testDeleteAccessDenied(self):\n self.assertEqual(SequencingMachine.objects.count(), 1)\n self.runDelete(None, sequencer=self.hiseq2000.sodar_uuid)\n self.assertEqual(SequencingMachine.objects.count(), 1)\n self.response_401()\n for user in (self.guest, self.norole, self.unrelated_owner):\n self.assertEqual(SequencingMachine.objects.count(), 1)\n self.runDelete(user, sequencer=self.hiseq2000.sodar_uuid)\n self.assertEqual(SequencingMachine.objects.count(), 1)\n self.response_403()",
"def test_delete_fail(self):\n self.user_api()\n self.base.metadata.create_all(self.engine)\n people = self.provision_users()\n p = {'id': people[2].id}\n self.delete('user', 403, params=p)",
"def test_anonymous_user_delete_anonymous_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n anonymous_taskrun = AnonymousTaskRunFactory.create()\r\n\r\n assert_raises(Unauthorized,\r\n getattr(require, 'taskrun').delete,\r\n anonymous_taskrun)",
"def test_users_username_delete(self):\n pass"
] | [
"0.8130148",
"0.8038917",
"0.7928053",
"0.7784703",
"0.7748895",
"0.7658275",
"0.7418729",
"0.7418729",
"0.7411724",
"0.73322123",
"0.72632694",
"0.72585547",
"0.72350216",
"0.71767664",
"0.714434",
"0.7127663",
"0.7013489",
"0.7002689",
"0.6998825",
"0.6979983",
"0.69761485",
"0.69578755",
"0.6948649",
"0.6929375",
"0.69225824",
"0.69190884",
"0.6913715",
"0.69032776",
"0.68868166",
"0.68787116"
] | 0.82500076 | 0 |
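The assertion pattern repeated across these records suggests that `template_api.delete` consults an access-control layer which raises `AccessControlError` before touching the database. A toy guard reproducing exactly the outcomes pinned down by the queries in this section (the rule set is inferred from the tests, not taken from the library):

```python
class AccessControlError(Exception):
    """Raised when the requesting user may not perform an operation."""


def check_can_delete(template, request):
    """Toy guard mirroring the delete outcomes asserted in these records."""
    user = request.user
    if user is None or getattr(user, "is_anonymous", False):
        raise AccessControlError("Anonymous users cannot delete templates.")
    if template.user is None:  # unowned, i.e. a global template
        if not (user.is_staff or user.is_superuser):
            raise AccessControlError("Only staff can delete global templates.")
    elif str(template.user) != str(user.id) and not user.is_superuser:
        raise AccessControlError("Cannot delete another user's template.")
```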
test delete global template as anonymous with access right raises access control error | def test_delete_global_template_as_anonymous_with_access_right_raises_access_control_error(
self,
):
mock_request = create_mock_request(user=self.anonymous_user)
with self.assertRaises(AccessControlError):
template_api.delete(
self.fixture.global_template, request=mock_request
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_global_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_unshare_template_registration(self):\n pass",
"def test_unregister_template(self):\n pass",
"def test_delete_activity_template(self):\n pass",
"def test_delete_user_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_user_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_device_template(self):\n pass",
"def test_delete_subscription_template(self):\n pass",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_collection_namespaced_template(self):\n pass",
"def test_delete_other_users_template_as_user_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_other_users_template_as_staff_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_get_global_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )",
"def test_share_template_registration(self):\n pass",
"def test_upsert_global_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.upsert(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_admin_from_org(self):\n pass",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_get_global_template_as_anonymous_with_access_right_returns_template(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n template = template_api.get_by_id(\n self.fixture.global_template.id, request=mock_request\n )\n self.assertEqual(template, self.fixture.global_template)",
"def test_upsert_global_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.upsert(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_public_status_page_delete_public_status_page(self):\n pass",
"def test_delete_namespaced_route(self):\n pass",
"def test_delete_but_no_view_permission(client):\n user = user_with_permissions(\"polls.delete_poll\")\n\n url = reverse(\"admin:index\")\n client.force_login(user)\n\n response = client.get(url)\n assert parse_sidemenu(response) == {\"Global\": [\"/en/admin/\"], \"Polls\": [None]}"
] | [
"0.7957372",
"0.78233325",
"0.7783485",
"0.7648756",
"0.73494995",
"0.724509",
"0.7238044",
"0.7229371",
"0.7146958",
"0.70505244",
"0.694791",
"0.6860597",
"0.6852993",
"0.67819417",
"0.66837376",
"0.66768485",
"0.6628545",
"0.6627234",
"0.6593351",
"0.65602285",
"0.6522194",
"0.646566",
"0.64514464",
"0.64224994",
"0.6421927",
"0.64215434",
"0.64185625",
"0.6413753",
"0.63877773",
"0.6314466"
] | 0.78978837 | 1 |
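The `_with_access_right` variants, such as the record above, differ from their plain anonymous counterparts only in setup: some extra right is granted to anonymous users, and delete must still be refused. In a Django suite that kind of variant is commonly produced with `override_settings`; a hedged sketch, where the flag name is an assumption rather than something confirmed by this dump:

```python
from django.test import SimpleTestCase, override_settings


# Hypothetical flag name; the records only show that anonymous users are
# granted some access right before the same denial checks are re-run.
@override_settings(CAN_ANONYMOUS_ACCESS_PUBLIC_DOCUMENT=True)
class DeleteAsAnonymousWithAccessRightTestCase(SimpleTestCase):
    def test_delete_global_template_still_denied(self):
        ...  # same body as the plain anonymous test in the record above
```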
test delete own template as user saves | def test_delete_own_template_as_user_saves(self):
mock_request = create_mock_request(user=self.user1)
template_api.delete(self.fixture.user1_template, request=mock_request) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_delete_activity_template(self):\n pass",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())",
"def test_delete_subscription_template(self):\n pass",
"def test_delete_device_template(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_successfult_post_deletion(self):\n self.user.is_moderator = True\n self.user.save()\n response = self.client.delete(reverse('api:posts-detail', kwargs={'pk': self.post1.id}))\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(Post.objects.count(), 1)",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_delete_user_field(self):\n pass",
"def test_functionality(self):\n templateName = \"Test Template\"\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)\n \n #Deleting Standard User\n userList = self.getLocalUsers(userName=globalVars.standardUser)\n if len(userList) > 0:\n self.deleteLocalUser(globalVars.standardUser, verifyUser=True)\n \n #Creates Sample Template if not exists\n self.createSampleTemplate(templateName=templateName, deleteAndCreate=True, publishedTemplate=True)",
"def test_delete(self):\n pass",
"def test_project_get_deleted_upon_user_delete(self):\n\n project = django_dynamic_fixture.get(Project)\n user1 = django_dynamic_fixture.get(User)\n project.users.add(user1)\n\n project.refresh_from_db()\n assert project.users.all().count() == 1\n\n # Delete the user\n user1.delete()\n # The object should not exist\n project = Project.objects.all().filter(id=project.id)\n assert not project.exists()",
"def test_delete_collection_user(self):\n pass",
"def test_user_id_delete(self):\n pass",
"def test_delete_function(self): \n self.new_user.save_prof()\n user2 = User_prof(username = \"mbugua\", bio = \"the world revolves\" ) \n user2.save_prof()\n \n user2.delete_prof()\n all = User_prof.objects.all()\n self.assertEqual(len(all),1)",
"def test_delete_template_success(self):\n template_id = util.MOCK_UUID_1\n\n rv = TEST_CLIENT.delete(f\"/templates/{template_id}\")\n result = rv.json()\n\n expected = {\"message\": \"Template deleted\"}\n self.assertDictEqual(expected, result)",
"def test_unshare_template_registration(self):\n pass",
"def test_users_username_delete(self):\n pass",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_run(self):\n pass",
"def test_workflows_id_templates_fk_delete(self):\n pass"
] | [
"0.8405559",
"0.7938252",
"0.78544295",
"0.7835479",
"0.76984704",
"0.75773",
"0.75123143",
"0.7462032",
"0.73763233",
"0.7366287",
"0.7339748",
"0.72637725",
"0.72637725",
"0.7095875",
"0.70382494",
"0.6969727",
"0.6962131",
"0.6931763",
"0.6909945",
"0.68845177",
"0.68657106",
"0.685852",
"0.6822185",
"0.6780021",
"0.6778033",
"0.67671883",
"0.67565364",
"0.6736447",
"0.6705977",
"0.67031276"
] | 0.8729449 | 0 |
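All of these tests read their inputs off `self.fixture`, which supplies one template per user plus a global one. The fixture class itself is outside this dump; a self-contained stand-in matching the attribute accesses seen in the tests (field names follow the tests, everything else is assumed):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeTemplate:
    filename: str
    user: Optional[str]  # owner id as a string; None marks a global template
    hash: str = ""


class TemplateFixtures:
    """Assumed shape of the `self.fixture` object used throughout."""

    def insert_data(self):
        self.user1_template = FakeTemplate("user1_template.xsd", user="1")
        self.user2_template = FakeTemplate("user2_template.xsd", user="2")
        self.global_template = FakeTemplate("global_template.xsd", user=None)
```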
test delete other users template as user raises access control error | def test_delete_other_users_template_as_user_raises_access_control_error(
self,
):
mock_request = create_mock_request(user=self.user1)
with self.assertRaises(AccessControlError):
template_api.delete(
self.fixture.user2_template, request=mock_request
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_other_users_template_as_staff_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_user_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_user_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_activity_template(self):\n pass",
"def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)",
"def test_delete_fail(self):\n self.user_api()\n self.base.metadata.create_all(self.engine)\n people = self.provision_users()\n p = {'id': people[2].id}\n self.delete('user', 403, params=p)",
"def test_delete_collection_user(self):\n pass",
"def test_jenkins_user_delete(self):\n ju = JenkinsUser.objects.get(username=\"user_1\")\n self.assertRaises(django.db.models.deletion.ProtectedError, ju.delete)",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete(self):\n user = self.custodian_1_user\n urls = [reverse('api:user-detail', kwargs={'pk': user.pk})]\n data = None\n access = {\n \"forbidden\": [self.anonymous_client, self.readonly_client, self.custodian_1_client, self.admin_client,\n self.custodian_2_client],\n \"allowed\": []\n }\n\n for client in access['forbidden']:\n for url in urls:\n self.assertIn(\n client.delete(url, data, format='json').status_code,\n [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]\n )\n\n for client in access['allowed']:\n for url in urls:\n self.assertEqual(\n client.delete(url, data, format='json').status_code,\n status.HTTP_200_OK\n )",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_delete_global_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_users_username_delete(self):\n pass",
"def test_delete_device_template(self):\n pass",
"def test_user_id_delete(self):\n pass",
"def test_delete_post_by_user(self):\n\n response = self.client.delete(reverse('api:posts-detail', kwargs={'pk': self.post1.id}))\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def allowed_topologytemplate_access_delete(user, template):\n try:\n up = user.get_profile()\n except AttributeError:\n return False\n\n return template.owner == user or user.has_perm(\"vnswww.topologytemplete_delete_any\") or (user.has_perm(\"vnswww.topologytemplete_delete_org\") and template.org == up.org)",
"def test_delete(self, client, users):\n user = users[0]\n url = reverse('users:delete', args=(user.pk,))\n response = client.get(url)\n assert response.status_code == 405\n response = client.post(url)\n assert response.status_code == 302\n assert response.url == reverse('users:list')\n assert not get_user_model().objects.filter(pk=user.pk).exists()"
] | [
"0.8105018",
"0.8104827",
"0.8100035",
"0.8017625",
"0.80126005",
"0.7705949",
"0.7705949",
"0.7640928",
"0.7617843",
"0.7548961",
"0.75398606",
"0.7430284",
"0.73903745",
"0.72945106",
"0.7280149",
"0.72691876",
"0.723362",
"0.720127",
"0.7198595",
"0.71942717",
"0.7187567",
"0.7187567",
"0.7170138",
"0.71505135",
"0.7149363",
"0.71159655",
"0.7110281",
"0.7103699",
"0.70891345",
"0.707179"
] | 0.81878215 | 0 |
test delete global template as user raises access control error | def test_delete_global_template_as_user_raises_access_control_error(self):
mock_request = create_mock_request(user=self.user1)
with self.assertRaises(AccessControlError):
template_api.delete(
self.fixture.global_template, request=mock_request
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_global_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_unshare_template_registration(self):\n pass",
"def test_delete_global_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_activity_template(self):\n pass",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_device_template(self):\n pass",
"def test_unregister_template(self):\n pass",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_user_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_other_users_template_as_user_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_user_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_subscription_template(self):\n pass",
"def test_delete_other_users_template_as_staff_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_functionality(self):\n templateName = \"Test Template\"\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)\n \n #Deleting Standard User\n userList = self.getLocalUsers(userName=globalVars.standardUser)\n if len(userList) > 0:\n self.deleteLocalUser(globalVars.standardUser, verifyUser=True)\n \n #Creates Sample Template if not exists\n self.createSampleTemplate(templateName=templateName, deleteAndCreate=True, publishedTemplate=True)",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_collection_namespaced_template(self):\n pass",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def test_delete_admin_from_org(self):\n pass",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def allowed_topologytemplate_access_delete(user, template):\n try:\n up = user.get_profile()\n except AttributeError:\n return False\n\n return template.owner == user or user.has_perm(\"vnswww.topologytemplete_delete_any\") or (user.has_perm(\"vnswww.topologytemplete_delete_org\") and template.org == up.org)",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())"
] | [
"0.8058154",
"0.78446215",
"0.7595148",
"0.75532234",
"0.75362784",
"0.753489",
"0.74993956",
"0.7437848",
"0.72980475",
"0.7283631",
"0.726282",
"0.7225051",
"0.7123989",
"0.711558",
"0.7096376",
"0.7071216",
"0.70578927",
"0.70301336",
"0.70142186",
"0.69239014",
"0.68534476",
"0.6800143",
"0.67917556",
"0.6785084",
"0.66950506",
"0.66913056",
"0.66913056",
"0.6687548",
"0.6586792",
"0.6573466"
] | 0.8217039 | 0 |
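Putting the sketches above together, the behaviour the last two records pin down (an owner may delete their own template, while an ordinary user is refused on a global one) can be reproduced end to end; this reuses `FakeTemplate`, `create_mock_request`, `check_can_delete`, and `AccessControlError` from the earlier sketches:

```python
from types import SimpleNamespace

user1 = SimpleNamespace(id="1", is_anonymous=False,
                        is_staff=False, is_superuser=False)

own_template = FakeTemplate("user1_template.xsd", user="1")
global_template = FakeTemplate("global_template.xsd", user=None)

# Deleting one's own template passes the guard silently.
check_can_delete(own_template, create_mock_request(user=user1))

# Deleting a global template as a plain user is refused.
try:
    check_can_delete(global_template, create_mock_request(user=user1))
except AccessControlError as exc:
    print(exc)  # Only staff can delete global templates.
```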
test delete own template as staff saves | def test_delete_own_template_as_staff_saves(self):
mock_request = create_mock_request(user=self.staff_user1)
template_api.delete(self.fixture.user1_template, request=mock_request) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_activity_template(self):\n pass",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_template_subscription(self):\n pass",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_delete_subscription_template(self):\n pass",
"def test_delete_device_template(self):\n pass",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def test_delete(self):\n pass",
"def test_delete_collection_namespaced_template(self):\n pass",
"def test_workflows_id_templates_fk_delete(self):\n pass",
"def test_delete_run(self):\n pass",
"def test_delete_case(self):\n pass",
"def test_successfult_post_deletion(self):\n self.user.is_moderator = True\n self.user.save()\n response = self.client.delete(reverse('api:posts-detail', kwargs={'pk': self.post1.id}))\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(Post.objects.count(), 1)",
"def test_delete(self):\n pt = PrepTemplate.create(self.metadata, self.new_raw_data,\n self.test_study, self.data_type_id)\n PrepTemplate.delete(pt.id)\n\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_template WHERE prep_template_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.common_prep_info WHERE prep_template_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_columns WHERE prep_template_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n\n with self.assertRaises(QiitaDBExecutionError):\n self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_2\")",
"def test_unshare_template_registration(self):\n pass",
"def test_delete(admin_client):\n book = BookFactory()\n url = reverse(\"admin:books_book_delete\", args=(book.pk,))\n\n response = admin_client.get(url)\n templates_used = [t.name for t in response.templates]\n\n assert response.status_code == 200\n render_counts = {x: templates_used.count(x) for x in set(templates_used)}\n\n # The number of times each template was rendered\n assert render_counts == {\n \"admin/delete_confirmation.html\": 1,\n \"admin/base_site.html\": 1,\n \"admin/base.html\": 1,\n \"admin/includes/object_delete_summary.html\": 1,\n \"jazzmin/includes/ui_builder_panel.html\": 1,\n }\n\n # The templates that were used\n assert set(templates_used) == {\n \"admin/delete_confirmation.html\",\n \"admin/base_site.html\",\n \"admin/base.html\",\n \"admin/includes/object_delete_summary.html\",\n \"jazzmin/includes/ui_builder_panel.html\",\n }\n\n response = admin_client.post(url, data={\"post\": \"yes\"}, follow=True)\n\n # We deleted our object, and are now back on the changelist\n assert not Book.objects.all().exists()\n assert response.resolver_match.url_name == \"books_book_changelist\"",
"def test_delete1(self):\n pass",
"def test_client_verification_document_delete(self):\n pass",
"def test_delete7(self):\n pass",
"def test_delete_confirmation_template(self):\n self.login()\n\n # BlogIndex needs translated pages before child pages can be translated\n self.fr_blog_index = self.en_blog_index.copy_for_translation(self.fr_locale)\n # Create a copy of the en_blog_post object as a translated page\n self.fr_blog_post = self.en_blog_post.copy_for_translation(self.fr_locale)\n\n # Create an alias page to test the `translations_to_move_count`\n # in the template context\n new_page = CreatePageAliasAction(\n self.en_blog_post,\n recursive=False,\n parent=self.en_blog_index,\n update_slug=\"alias-page-slug\",\n user=None,\n )\n new_page.execute(skip_permission_checks=True)\n\n response = self.client.get(\n reverse(\n \"wagtailadmin_pages:delete\",\n args=(self.en_blog_post.id,),\n ),\n follow=True,\n )\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.context[\"translation_count\"], 1)\n self.assertEqual(response.context[\"translation_descendant_count\"], 0)\n self.assertIn(\n \"Deleting this page will also delete 1 translation of this page.\",\n response.content.decode(\"utf-8\"),\n )",
"def test_plusrecors_delete(self):\n f = FlatPage.objects.create(title = 'f',\n url = '/minus/upload/plus/done/',\n content = 'flapi')\n self.go200('minus_plus_upload')\n self.formfile('minus_plus_upload', 'file', AUDIO_FILE)\n self.submit200()\n self.url('minus_plus_upload_done')\n self.find('flapi')\n self.go200('minus_plus_upload')\n self.formfile('minus_plus_upload', 'file', AUDIO_FILE)\n self.submit200()\n self.assert_equal(MinusPlusRecord.objects.count(), 2)\n self.go200('minus_upload')\n self.formfile('minus_upload', 'file', AUDIO_FILE)\n self.submit200()\n self.find('Плюс')\n m = MinusRecord.objects.all()[0]\n p = MinusPlusRecord.objects.all()[0]\n self.assert_equal(p.minus , m)\n self.assert_equal(MinusPlusRecord.objects.count(), 1)\n self.go200('plus_delete', [p.id])\n self.fv('object_delete','__confirm__', 1)\n self.submit200()\n self.assert_equal(MinusPlusRecord.objects.count(), 0)",
"def test_delete(self):\n SampleTemplate.create(self.metadata, self.new_study)\n SampleTemplate.delete(2)\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.required_sample_info WHERE study_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.study_sample_columns WHERE study_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n with self.assertRaises(QiitaDBExecutionError):\n self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.sample_2\")",
"def test_unregister_template(self):\n pass"
] | [
"0.8108582",
"0.80523485",
"0.80356413",
"0.7905152",
"0.76571864",
"0.76172143",
"0.7565759",
"0.7552634",
"0.74682933",
"0.74544466",
"0.73570675",
"0.7341402",
"0.7218155",
"0.7113023",
"0.7024291",
"0.7003252",
"0.69519466",
"0.6912048",
"0.69030714",
"0.6814051",
"0.6802641",
"0.67919284",
"0.67837423",
"0.67816675",
"0.67684025",
"0.67188036",
"0.67090416",
"0.66864455",
"0.6686104",
"0.66839916"
] | 0.8403106 | 0 |
test delete other users template as staff raises access control error | def test_delete_other_users_template_as_staff_raises_access_control_error(
self,
):
mock_request = create_mock_request(user=self.staff_user1)
with self.assertRaises(AccessControlError):
template_api.delete(
self.fixture.user2_template, request=mock_request
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_other_users_template_as_user_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_user_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_user_template_as_anonymous_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user1_template, request=mock_request\n )",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def test_delete_activity_template(self):\n pass",
"def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)",
"def test_delete_global_template_as_anonymous_with_access_right_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.anonymous_user)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_delete_collection_user(self):\n pass",
"def test_delete_admin_from_org(self):\n pass",
"def test_delete(self):\n user = self.custodian_1_user\n urls = [reverse('api:user-detail', kwargs={'pk': user.pk})]\n data = None\n access = {\n \"forbidden\": [self.anonymous_client, self.readonly_client, self.custodian_1_client, self.admin_client,\n self.custodian_2_client],\n \"allowed\": []\n }\n\n for client in access['forbidden']:\n for url in urls:\n self.assertIn(\n client.delete(url, data, format='json').status_code,\n [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]\n )\n\n for client in access['allowed']:\n for url in urls:\n self.assertEqual(\n client.delete(url, data, format='json').status_code,\n status.HTTP_200_OK\n )",
"def test_delete_fail(self):\n self.user_api()\n self.base.metadata.create_all(self.engine)\n people = self.provision_users()\n p = {'id': people[2].id}\n self.delete('user', 403, params=p)",
"def test_not_creator_cannot_delete(self):\n\n logged_user = utils.create_user_and_authenticate(self)\n self.group.users.add(logged_user)\n expected_url = reverse('my_groups_view')\n\n utils.test_cannot_access(self, self.url, expected_url)\n self.assertEqual(len(Group.objects.all()), 1)",
"def allowed_topologytemplate_access_delete(user, template):\n try:\n up = user.get_profile()\n except AttributeError:\n return False\n\n return template.owner == user or user.has_perm(\"vnswww.topologytemplete_delete_any\") or (user.has_perm(\"vnswww.topologytemplete_delete_org\") and template.org == up.org)",
"def test_delete_template_subscription(self):\n pass",
"def test_jenkins_user_delete(self):\n ju = JenkinsUser.objects.get(username=\"user_1\")\n self.assertRaises(django.db.models.deletion.ProtectedError, ju.delete)",
"def test_delete_namespaced_template(self):\n pass",
"def test_admin_cannot_delete_non_existant_user(self):\n resp = self.admin_create_user()\n reply = self.admin_login()\n token = reply['token']\n \n resp = self.client.delete(\n '/api/v1/users/5',\n content_type='application/json',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], \"This attendant does not exist!\")\n self.assertEqual(resp.status_code, 404)",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_creator_in_group_can_delete(self):\n\n self.client.login(username='notlogged', password='notlogged')\n expected_url = reverse('my_groups_view')\n\n utils.test_can_access(self, self.url,\n post_redirect_url=expected_url)\n self.assertEqual(len(Group.objects.all()), 0)"
] | [
"0.808326",
"0.802726",
"0.7965073",
"0.79332936",
"0.7800406",
"0.7799253",
"0.76195556",
"0.75638926",
"0.75588703",
"0.7483941",
"0.7483941",
"0.73990446",
"0.73645484",
"0.7327109",
"0.72690487",
"0.71860415",
"0.7184741",
"0.7150509",
"0.70893085",
"0.7082953",
"0.7061374",
"0.70404106",
"0.7038783",
"0.7038121",
"0.70271075",
"0.7008119",
"0.7004624",
"0.70023173",
"0.70023173",
"0.69911903"
] | 0.8265562 | 0 |
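Taken together, the delete records in this section enumerate a role × ownership matrix one test method at a time. For reference, the same matrix can be written compactly with `unittest` subtests; this is a stylistic sketch of the cases asserted above, not how the suite under test is organised:

```python
import unittest


class DeleteMatrixTestCase(unittest.TestCase):
    # (role, template attribute on the fixture, expect AccessControlError)
    CASES = [
        ("anonymous_user", "user1_template", True),
        ("anonymous_user", "global_template", True),
        ("user1", "user1_template", False),
        ("user1", "user2_template", True),
        ("user1", "global_template", True),
        ("staff_user1", "user1_template", False),
        ("staff_user1", "user2_template", True),
        ("staff_user1", "global_template", False),
        ("superuser1", "user2_template", False),
    ]

    def test_delete_matrix(self):
        for role, template_name, expect_error in self.CASES:
            with self.subTest(role=role, template=template_name):
                ...  # build the request for `role`, call delete, assert
```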
test delete global template as staff saves | def test_delete_global_template_as_staff_saves(self):
mock_request = create_mock_request(user=self.staff_user1)
template_api.delete(self.fixture.global_template, request=mock_request) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_activity_template(self):\n pass",
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_device_template(self):\n pass",
"def test_unshare_template_registration(self):\n pass",
"def test_unregister_template(self):\n pass",
"def test_delete_subscription_template(self):\n pass",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_delete_collection_namespaced_template(self):\n pass",
"def test_update_template_registration(self):\n pass",
"def test_delete_run(self):\n pass",
"def test_delete(self):\n pass",
"def test_delete_admin_from_org(self):\n pass",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def delete_custom_template(self, name, filename, context):\n pass",
"def test_workflows_id_templates_fk_delete(self):\n pass",
"def test_delete(admin_client):\n book = BookFactory()\n url = reverse(\"admin:books_book_delete\", args=(book.pk,))\n\n response = admin_client.get(url)\n templates_used = [t.name for t in response.templates]\n\n assert response.status_code == 200\n render_counts = {x: templates_used.count(x) for x in set(templates_used)}\n\n # The number of times each template was rendered\n assert render_counts == {\n \"admin/delete_confirmation.html\": 1,\n \"admin/base_site.html\": 1,\n \"admin/base.html\": 1,\n \"admin/includes/object_delete_summary.html\": 1,\n \"jazzmin/includes/ui_builder_panel.html\": 1,\n }\n\n # The templates that were used\n assert set(templates_used) == {\n \"admin/delete_confirmation.html\",\n \"admin/base_site.html\",\n \"admin/base.html\",\n \"admin/includes/object_delete_summary.html\",\n \"jazzmin/includes/ui_builder_panel.html\",\n }\n\n response = admin_client.post(url, data={\"post\": \"yes\"}, follow=True)\n\n # We deleted our object, and are now back on the changelist\n assert not Book.objects.all().exists()\n assert response.resolver_match.url_name == \"books_book_changelist\"",
"def unload_fixture(apps, schema_editor):\n\n objects = deserialize_fixture()\n\n EmailTemplate = apps.get_model(\"helpdesk\", \"emailtemplate\")\n EmailTemplate.objects.filter(pk__in=[ obj.object.pk for obj in objects ]).delete()",
"def test_functionality(self):\n templateName = \"Test Template\"\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)\n \n #Deleting Standard User\n userList = self.getLocalUsers(userName=globalVars.standardUser)\n if len(userList) > 0:\n self.deleteLocalUser(globalVars.standardUser, verifyUser=True)\n \n #Creates Sample Template if not exists\n self.createSampleTemplate(templateName=templateName, deleteAndCreate=True, publishedTemplate=True)",
"def test_delete7(self):\n pass",
"def test_delete1(self):\n pass",
"def test_upsert_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.upsert(self.fixture.global_template, request=mock_request)",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )"
] | [
"0.8153071",
"0.7786319",
"0.7784371",
"0.7716912",
"0.75208735",
"0.7489113",
"0.7379321",
"0.73716503",
"0.72625434",
"0.72461313",
"0.71758413",
"0.71681505",
"0.71080875",
"0.709806",
"0.7042556",
"0.6959074",
"0.6730459",
"0.67151546",
"0.66676325",
"0.66467226",
"0.6606063",
"0.6600875",
"0.6581047",
"0.6563051",
"0.65554786",
"0.6541608",
"0.65096015",
"0.64963144",
"0.64875746",
"0.6475782"
] | 0.84990233 | 0 |
test delete own template as superuser saves | def test_delete_own_template_as_superuser_saves(self):
mock_request = create_mock_request(user=self.superuser1)
template_api.delete(self.fixture.user1_template, request=mock_request) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_activity_template(self):\n pass",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_device_template(self):\n pass",
"def test_delete_subscription_template(self):\n pass",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())",
"def test_post_delete_admin(self):\n url = reverse('post-detail', kwargs={'pk': self.post.id})\n self.client.force_authenticate(user=self.superuser)\n response = self.client.delete(url)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_successfult_post_deletion(self):\n self.user.is_moderator = True\n self.user.save()\n response = self.client.delete(reverse('api:posts-detail', kwargs={'pk': self.post1.id}))\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(Post.objects.count(), 1)",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))",
"def test_delete(self):\n pass",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_unregister_template(self):\n pass",
"def test_unshare_template_registration(self):\n pass",
"def test_delete(admin_client):\n book = BookFactory()\n url = reverse(\"admin:books_book_delete\", args=(book.pk,))\n\n response = admin_client.get(url)\n templates_used = [t.name for t in response.templates]\n\n assert response.status_code == 200\n render_counts = {x: templates_used.count(x) for x in set(templates_used)}\n\n # The number of times each template was rendered\n assert render_counts == {\n \"admin/delete_confirmation.html\": 1,\n \"admin/base_site.html\": 1,\n \"admin/base.html\": 1,\n \"admin/includes/object_delete_summary.html\": 1,\n \"jazzmin/includes/ui_builder_panel.html\": 1,\n }\n\n # The templates that were used\n assert set(templates_used) == {\n \"admin/delete_confirmation.html\",\n \"admin/base_site.html\",\n \"admin/base.html\",\n \"admin/includes/object_delete_summary.html\",\n \"jazzmin/includes/ui_builder_panel.html\",\n }\n\n response = admin_client.post(url, data={\"post\": \"yes\"}, follow=True)\n\n # We deleted our object, and are now back on the changelist\n assert not Book.objects.all().exists()\n assert response.resolver_match.url_name == \"books_book_changelist\"",
"def test_delete_user_field(self):\n pass",
"def test_delete_confirmation_template(self):\n self.login()\n\n # BlogIndex needs translated pages before child pages can be translated\n self.fr_blog_index = self.en_blog_index.copy_for_translation(self.fr_locale)\n # Create a copy of the en_blog_post object as a translated page\n self.fr_blog_post = self.en_blog_post.copy_for_translation(self.fr_locale)\n\n # Create an alias page to test the `translations_to_move_count`\n # in the template context\n new_page = CreatePageAliasAction(\n self.en_blog_post,\n recursive=False,\n parent=self.en_blog_index,\n update_slug=\"alias-page-slug\",\n user=None,\n )\n new_page.execute(skip_permission_checks=True)\n\n response = self.client.get(\n reverse(\n \"wagtailadmin_pages:delete\",\n args=(self.en_blog_post.id,),\n ),\n follow=True,\n )\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.context[\"translation_count\"], 1)\n self.assertEqual(response.context[\"translation_descendant_count\"], 0)\n self.assertIn(\n \"Deleting this page will also delete 1 translation of this page.\",\n response.content.decode(\"utf-8\"),\n )",
"def test_delete_collection_user(self):\n pass",
"def test_upsert_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.upsert(self.fixture.user1_template, request=mock_request)",
"def test_delete_collection_namespaced_template(self):\n pass",
"def test_delete_template_success(self):\n template_id = util.MOCK_UUID_1\n\n rv = TEST_CLIENT.delete(f\"/templates/{template_id}\")\n result = rv.json()\n\n expected = {\"message\": \"Template deleted\"}\n self.assertDictEqual(expected, result)"
] | [
"0.8280397",
"0.8224194",
"0.8060117",
"0.78996503",
"0.75895417",
"0.7520532",
"0.7519773",
"0.75025415",
"0.73786604",
"0.73680294",
"0.721002",
"0.70879525",
"0.702414",
"0.70129967",
"0.7012797",
"0.7012797",
"0.6967623",
"0.6949611",
"0.68701947",
"0.68661785",
"0.6793061",
"0.67817277",
"0.6778631",
"0.67479277",
"0.67412865",
"0.67247736",
"0.66937214",
"0.6683149",
"0.66776276",
"0.6643971"
] | 0.8697194 | 0 |
test delete other users template as superuser saves | def test_delete_other_users_template_as_superuser_saves(self):
mock_request = create_mock_request(user=self.superuser1)
template_api.delete(self.fixture.user2_template, request=mock_request) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def test_delete_activity_template(self):\n pass",
"def test_delete_device_template(self):\n pass",
"def test_delete_template_subscription(self):\n pass",
"def test_delete_subscription_template(self):\n pass",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_user_field(self):\n pass",
"def test_successfult_post_deletion(self):\n self.user.is_moderator = True\n self.user.save()\n response = self.client.delete(reverse('api:posts-detail', kwargs={'pk': self.post1.id}))\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(Post.objects.count(), 1)",
"def test_delete_collection_user(self):\n pass",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_delete_other_users_template_as_user_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_users_username_delete(self):\n pass",
"def test_delete_function(self): \n self.new_user.save_prof()\n user2 = User_prof(username = \"mbugua\", bio = \"the world revolves\" ) \n user2.save_prof()\n \n user2.delete_prof()\n all = User_prof.objects.all()\n self.assertEqual(len(all),1)",
"def test_team_template_folders_id_delete(self):\n pass",
"def delete_user():",
"def test_user_id_delete(self):\n pass",
"def test_products_ref_users_user_delete(self):\n pass",
"def test_delete_device_user(self):\n pass",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_delete_other_users_template_as_staff_raises_access_control_error(\n self,\n ):\n mock_request = create_mock_request(user=self.staff_user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.user2_template, request=mock_request\n )",
"def test_jenkins_user_delete(self):\n ju = JenkinsUser.objects.get(username=\"user_1\")\n self.assertRaises(django.db.models.deletion.ProtectedError, ju.delete)",
"def test_project_get_deleted_upon_user_delete(self):\n\n project = django_dynamic_fixture.get(Project)\n user1 = django_dynamic_fixture.get(User)\n project.users.add(user1)\n\n project.refresh_from_db()\n assert project.users.all().count() == 1\n\n # Delete the user\n user1.delete()\n # The object should not exist\n project = Project.objects.all().filter(id=project.id)\n assert not project.exists()"
] | [
"0.8817737",
"0.85475934",
"0.8236777",
"0.7992185",
"0.7491608",
"0.7396679",
"0.7396679",
"0.7336298",
"0.725919",
"0.72099054",
"0.7088548",
"0.7082489",
"0.7050118",
"0.70074046",
"0.69393295",
"0.6897725",
"0.688309",
"0.6881316",
"0.687922",
"0.68716925",
"0.68648237",
"0.6858488",
"0.68578494",
"0.6855873",
"0.68439484",
"0.68362015",
"0.68362015",
"0.6825118",
"0.68187386",
"0.6805314"
] | 0.86611134 | 1 |
test delete global template as superuser saves | def test_delete_global_template_as_superuser_saves(self):
mock_request = create_mock_request(user=self.superuser1)
template_api.delete(self.fixture.global_template, request=mock_request) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_own_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_global_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.global_template, request=mock_request)",
"def test_delete_other_users_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.delete(self.fixture.user2_template, request=mock_request)",
"def test_delete_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_namespaced_template(self):\n pass",
"def test_delete_activity_template(self):\n pass",
"def test_delete_own_template_as_staff_saves(self):\n mock_request = create_mock_request(user=self.staff_user1)\n template_api.delete(self.fixture.user1_template, request=mock_request)",
"def test_delete_device_template(self):\n pass",
"def test_delete_template_subscription(self):\n pass",
"def test_unshare_template_registration(self):\n pass",
"def test_unregister_template(self):\n pass",
"def test_delete_subscription_template(self):\n pass",
"def test_delete_global_template_as_user_raises_access_control_error(self):\n mock_request = create_mock_request(user=self.user1)\n with self.assertRaises(AccessControlError):\n template_api.delete(\n self.fixture.global_template, request=mock_request\n )",
"def test_team_template_folders_id_delete(self):\n pass",
"def test_upsert_global_template_as_superuser_saves(self):\n mock_request = create_mock_request(user=self.superuser1)\n template_api.upsert(self.fixture.global_template, request=mock_request)",
"def test_xml_template_delete(self):\n XmlTemplate.delete_by_id(1)\n self.assertEqual(XmlTemplate.objects.count(), 1)\n self.assertFalse(XmlTemplate.objects.filter(pk=1).exists())",
"def test_team_template_folders_id_templates_fk_delete(self):\n pass",
"def test_functionality(self):\n templateName = \"Test Template\"\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)\n \n #Deleting Standard User\n userList = self.getLocalUsers(userName=globalVars.standardUser)\n if len(userList) > 0:\n self.deleteLocalUser(globalVars.standardUser, verifyUser=True)\n \n #Creates Sample Template if not exists\n self.createSampleTemplate(templateName=templateName, deleteAndCreate=True, publishedTemplate=True)",
"def test_delete_user(self):\n pass",
"def test_delete_user(self):\n pass",
"def delete_custom_template(self, name, filename, context):\n pass",
"def test_delete_collection_namespaced_template(self):\n pass",
"def test_update_template_registration(self):\n pass",
"def test_delete_admin_from_org(self):\n pass",
"def pre_service_template_delete(self, resource_id):\n pass",
"def test_post_delete_admin(self):\n url = reverse('post-detail', kwargs={'pk': self.post.id})\n self.client.force_authenticate(user=self.superuser)\n response = self.client.delete(url)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)",
"def test_commentary_view_delete(self):\n \n test_response = self.client.get('/papers/commentary/1/delete')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('object' in test_response.context) \n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'confirm_delete.html')",
"def test_delete(self):\n pass",
"def delete_template(self):\n try:\n os.remove(self.path)\n except Exception:\n pass",
"def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))"
] | [
"0.8211604",
"0.8005223",
"0.7721632",
"0.771418",
"0.756075",
"0.73948276",
"0.7389841",
"0.7346052",
"0.7268417",
"0.72225857",
"0.7203136",
"0.7147506",
"0.70092875",
"0.69376206",
"0.68497753",
"0.68214715",
"0.670203",
"0.66734916",
"0.66634125",
"0.66634125",
"0.65859646",
"0.65832746",
"0.65592617",
"0.6544842",
"0.64714414",
"0.6448012",
"0.6447797",
"0.64382035",
"0.64227635",
"0.6405539"
] | 0.8784708 | 0 |
Creates a breakpoint, which is a renamed deep copy of the QuantumCircuit, and creates and appends an AssertUniform instruction to its end. If the statistical test passes, the assertion passes; if the test fails, the assertion fails. | def get_breakpoint_uniform(self, qubit, cbit, pcrit=0.05):
clone = self.copy(Asserts._new_breakpoint_name())
assertion = AssertUniform(qubit, cbit, pcrit, False)
clone.append(assertion, [assertion._qubit], [assertion._cbit])
return clone | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_breakpoint_not_uniform(self, qubit, cbit, pcrit=0.05):\n clone = self.copy(Asserts._new_breakpoint_name())\n assertion = AssertUniform(qubit, cbit, pcrit, True)\n clone.append(assertion, [assertion._qubit], [assertion._cbit])\n return clone",
"def get_breakpoint_product(self, qubit0, cbit0, qubit1, cbit1, pcrit=0.05):\n clone = self.copy(Asserts._new_breakpoint_name())\n assertion = AssertProduct(qubit0, cbit0, qubit1, cbit1, pcrit, False)\n clone.append(assertion, [assertion._qubit], [assertion._cbit])\n return clone",
"def get_breakpoint_not_product(self, qubit0, cbit0, qubit1, cbit1, pcrit=0.05):\n clone = self.copy(Asserts._new_breakpoint_name())\n assertion = AssertProduct(qubit0, cbit0, qubit1, cbit1, pcrit, True)\n clone.append(assertion, [assertion._qubit], [assertion._cbit])\n return clone",
"def test(self):\n self.gdb.command(\"b just_before_write_loop\")\n self.gdb.c()\n write_loop = self.gdb.p(\"&write_loop\")\n self.gdb.command(\"watch data\")\n self.gdb.c()\n # Accept hitting the breakpoint before or after the store instruction.\n assertIn(self.gdb.p(\"$pc\"), [write_loop, write_loop + 4])\n assertEqual(self.gdb.p(\"$a0\"), self.gdb.p(\"&data\"))",
"def do_test(self):\n exe_name = \"a.out\"\n exe = self.getBuildArtifact(exe_name)\n\n # Create the target\n target = self.dbg.CreateTarget(exe)\n self.assertTrue(target, VALID_TARGET)\n\n # Target variables. This is not actually expected to work, but\n # also shouldn't crash.\n g_counter = target.EvaluateExpression(\"g_counter\")\n self.assertTrue(\n g_counter.IsValid(),\n \"g_counter returned a valid value object.\")\n\n # Set the breakpoints\n outer_bkpt = target.BreakpointCreateBySourceRegex(\n 'Set top_level breakpoint here', self.main_source_spec)\n self.assertTrue(outer_bkpt.GetNumLocations() > 0, VALID_BREAKPOINT)\n\n function_bkpt = target.BreakpointCreateBySourceRegex(\n 'Set function breakpoint here', self.main_source_spec)\n self.assertTrue(function_bkpt.GetNumLocations() > 0, VALID_BREAKPOINT)\n\n # Launch the process, and do not stop at the entry point.\n process = target.LaunchSimple(None, None, os.getcwd())\n\n self.assertTrue(process, PROCESS_IS_VALID)\n\n # Frame #0 should be at our breakpoint.\n threads = lldbutil.get_threads_stopped_at_breakpoint(\n process, outer_bkpt)\n\n self.assertTrue(len(threads) == 1)\n self.thread = threads[0]\n\n # All the variables should be uninitialized at this point. Maybe sure\n # they look that way:\n frame = self.thread.frames[0]\n options = lldb.SBExpressionOptions()\n options.SetFetchDynamicValue(lldb.eDynamicCanRunTarget)\n\n error = lldb.SBError()\n\n # Examine the variables before initialization:\n\n g_counter = frame.EvaluateExpression(\"g_counter\", options)\n self.assertTrue(\n g_counter.IsValid(),\n \"g_counter returned a valid value object.\")\n value = g_counter.GetValueAsSigned(error)\n self.assertTrue(error.Success(), \"Got a value for g_counter\")\n self.assertTrue(\n value == 0,\n \"g_counter value is the uninitialized one.\")\n\n foo_var = frame.EvaluateExpression(\"my_foo\", options)\n self.assertTrue(\n foo_var.IsValid(),\n \"foo_var returned a valid value object.\")\n value = foo_var.GetValueAsUnsigned(error)\n self.assertTrue(error.Success(), \"foo_var has a value.\")\n self.assertTrue(value == 0, \"foo_var is null before initialization.\")\n\n my_large_dude = frame.EvaluateExpression(\"my_large_dude\", options)\n self.assertTrue(my_large_dude.IsValid(),\n \"my_large_dude returned a valid value object.\")\n value = my_large_dude.GetValue()\n self.assertTrue(error.Success(), \"Got a value for my_large_dude\")\n self.assertTrue(\n value is None,\n \"my_large_dude value is the uninitialized one.\")\n\n # Now proceed to the breakpoint in our main function, make sure we can\n # still read these variables and they now have the right values.\n threads = lldbutil.continue_to_breakpoint(process, function_bkpt)\n self.assertTrue(len(threads) == 1)\n\n self.thread = threads[0]\n\n # Examine the variables before initialization:\n\n g_counter = frame.EvaluateExpression(\"g_counter\", options)\n self.assertTrue(\n g_counter.IsValid(),\n \"g_counter returned a valid value object.\")\n value = g_counter.GetValueAsSigned(error)\n self.assertTrue(error.Success(), \"Got a value for g_counter\")\n self.assertTrue(value == 2, \"g_counter value should be 2.\")\n\n foo_var = frame.EvaluateExpression(\"my_foo\", options)\n self.assertTrue(\n foo_var.IsValid(),\n \"foo_var returned a valid value object.\")\n foo_var_x = foo_var.GetChildMemberWithName(\"x\")\n self.assertTrue(foo_var_x.IsValid(), \"Got value object for foo_var.x\")\n value = foo_var_x.GetValueAsUnsigned(error)\n self.assertTrue(error.Success(), \"foo_var.x has a value.\")\n 
self.assertTrue(value == 1, \"foo_var is null before initialization.\")\n\n my_large_dude = frame.EvaluateExpression(\"my_large_dude\", options)\n self.assertTrue(my_large_dude.IsValid(),\n \"my_large_dude returned a valid value object.\")\n my_large_dude_y = my_large_dude.GetChildMemberWithName(\"y\")\n self.assertTrue(\n my_large_dude_y.IsValid(),\n \"Got value object for my_large_dude.y\")\n value = my_large_dude_y.GetValueAsUnsigned(error)\n self.assertTrue(error.Success(), \"Got a value for my_large_dude.y\")\n self.assertTrue(\n value == 20,\n \"my_large_dude value is the uninitialized one.\")",
"def test_debugger_api_boundary_condition(self):\n self.dbg.HandleCommand(None)\n self.dbg.SetDefaultArchitecture(None)\n self.dbg.GetScriptingLanguage(None)\n self.dbg.CreateTarget(None)\n self.dbg.CreateTarget(None, None, None, True, lldb.SBError())\n self.dbg.CreateTargetWithFileAndTargetTriple(None, None)\n self.dbg.CreateTargetWithFileAndArch(None, None)\n self.dbg.FindTargetWithFileAndArch(None, None)\n self.dbg.SetInternalVariable(None, None, None)\n self.dbg.GetInternalVariableValue(None, None)\n # FIXME (filcab): We must first allow for the swig bindings to know if\n # a Python callback is set. (Check python-typemaps.swig)\n # self.dbg.SetLoggingCallback(None)\n self.dbg.SetPrompt(None)\n self.dbg.SetCurrentPlatform(None)\n self.dbg.SetCurrentPlatformSDKRoot(None)\n \n fresh_dbg = lldb.SBDebugger()\n self.assertEquals(len(fresh_dbg), 0)",
"def test(self):\n # 0x13 is nop\n self.gdb.command(\"p *((int*) 0x%x)=0x13\" % self.target.ram)\n self.gdb.command(\"p *((int*) 0x%x)=0x13\" % (self.target.ram + 4))\n self.gdb.command(\"p *((int*) 0x%x)=0x13\" % (self.target.ram + 8))\n self.gdb.p(\"$pc=0x%x\" % self.target.ram)\n self.gdb.stepi()\n assertEqual((self.target.ram + 4), self.gdb.p(\"$pc\"))\n self.gdb.stepi()\n assertEqual((self.target.ram + 8), self.gdb.p(\"$pc\"))",
"def _analyse_stmt_Assert(self, statement: ast.Assert, *, next: CFNode) -> CFNode:\n test_is_constant, test_value = self._expression_as_constant(statement.test)\n\n branches: Dict[str, CFNode] = {}\n if test_is_constant:\n if test_value:\n branches.update(next=next)\n else:\n branches.update(error=self._raise)\n else:\n branches.update(next=next, error=self._raise)\n\n return self._ast_node(statement, **branches)",
"def test_add_circuit_noname(self):\n q_program = QuantumProgram()\n qr = q_program.create_quantum_register(size=2)\n cr = q_program.create_classical_register(size=2)\n qc1 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])\n qc2 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])\n qc1.h(qr[0])\n qc1.measure(qr[0], cr[0])\n qc2.measure(qr[1], cr[1])\n new_circuit = qc1 + qc2\n q_program.add_circuit(quantum_circuit=new_circuit)\n backend = 'local_qasm_simulator_py' # cpp simulator rejects non string IDs (FIXME)\n shots = 1024\n result = q_program.execute(backend=backend, shots=shots, seed=78)\n counts = result.get_counts(new_circuit.name)\n target = {'00': shots / 2, '01': shots / 2}\n threshold = 0.04 * shots\n self.assertDictAlmostEqual(counts, target, threshold)\n self.assertRaises(QISKitError, result.get_counts)",
"def test_normal_circuit(self):\n filename = self._get_resource_path('test_normal.tex')\n qc = QuantumCircuit(5)\n for qubit in range(5):\n qc.h(qubit)\n\n circuit_drawer(qc, filename=filename, output='latex_source')\n\n self.assertEqualToReference(filename)",
"def test_analytic_value_with_simple_circuit(self, differentiator, op):\n # Get an expectation op, with this differentiator attached.\n differentiator.refresh()\n op = differentiator.generate_differentiable_op(analytic_op=op)\n qubit = cirq.GridQubit(0, 0)\n circuit = util.convert_to_tensor(\n [cirq.Circuit(cirq.X(qubit)**sympy.Symbol('alpha'))])\n psums = util.convert_to_tensor([[cirq.Z(qubit)]])\n symbol_values_array = np.array([[0.123]], dtype=np.float32)\n # Calculate tfq gradient.\n symbol_values_tensor = tf.convert_to_tensor(symbol_values_array)\n with tf.GradientTape() as g:\n g.watch(symbol_values_tensor)\n expectations = op(circuit, tf.convert_to_tensor(['alpha']),\n symbol_values_tensor, psums)\n grads = g.gradient(expectations, symbol_values_tensor)\n ground_truth_grads = np.array([[-1.1839752]])\n self.assertAllClose(ground_truth_grads, grads, rtol=1e-2, atol=1e-2)",
"def test_issue14355(self):\n\n def make_diagram():\n # Use a nested function to ensure that all locals get garbage\n # collected quickly.\n\n # Construct a trivial plant and ID controller.\n # N.B. We explicitly do *not* add this plant to the diagram.\n controller_plant = MultibodyPlant(time_step=0.002)\n controller_plant.Finalize()\n builder = DiagramBuilder()\n controller = builder.AddSystem(\n InverseDynamicsController(\n controller_plant,\n kp=[],\n ki=[],\n kd=[],\n has_reference_acceleration=False,\n )\n )\n # Forward ports for ease of testing.\n builder.ExportInput(\n controller.get_input_port_estimated_state(), \"x_estimated\")\n builder.ExportInput(\n controller.get_input_port_desired_state(), \"x_desired\")\n builder.ExportOutput(controller.get_output_port_control(), \"u\")\n diagram = builder.Build()\n return diagram\n\n diagram = make_diagram()\n # N.B. Without the workaround for #14355, we get a segfault when\n # creating the context.\n context = diagram.CreateDefaultContext()\n diagram.GetInputPort(\"x_estimated\").FixValue(context, [])\n diagram.GetInputPort(\"x_desired\").FixValue(context, [])\n u = diagram.GetOutputPort(\"u\").Eval(context)\n np.testing.assert_equal(u, [])",
"def test_circuit_init(self):\n circuit, target = self.simple_circuit_no_measure()\n op = Chi(circuit)\n target = Chi(target)\n self.assertEqual(op, target)",
"def test(self):\n self.gdb.b(\"main:start\")\n self.gdb.c()\n self.gdb.command(\"p i=0\")\n last_pc = None\n advances = 0\n jumps = 0\n for _ in range(100):\n self.gdb.stepi()\n pc = self.gdb.p(\"$pc\")\n assertNotEqual(last_pc, pc)\n if last_pc and pc > last_pc and pc - last_pc <= 4:\n advances += 1\n else:\n jumps += 1\n last_pc = pc\n # Some basic sanity that we're not running between breakpoints or\n # something.\n assertGreater(jumps, 10)\n assertGreater(advances, 50)",
"def test_orthorhombic_sims(cell_dimensions, crystal_params):\n # Multiple of 6 works nicely with the p2 crystal\n cell_dimensions = cell_dimensions * 6\n with crystal_params.temp_context(cell_dimensions=cell_dimensions):\n snapshot = init_from_crystal(crystal_params)\n snapshot = equilibrate(snapshot, crystal_params, equil_type=\"crystal\")\n snapshot = make_orthorhombic(snapshot)\n temp_context = hoomd.context.initialize(crystal_params.hoomd_args)\n production(snapshot, temp_context, crystal_params, dynamics=False)",
"def test_defcamp_2015(self):\n # Load the binary\n binary_file = os.path.join(os.path.dirname(__file__), \"misc\", \"defcamp-2015-r100.bin\")\n self.load_binary(binary_file)\n\n # Define a fake stack\n self.Triton.setConcreteRegisterValue(self.Triton.registers.rbp, 0x7fffffff)\n self.Triton.setConcreteRegisterValue(self.Triton.registers.rsp, 0x6fffffff)\n\n # Define an user input\n self.Triton.setConcreteRegisterValue(self.Triton.registers.rdi, 0x10000000)\n\n # Symbolize user inputs (30 bytes)\n for index in range(30):\n self.Triton.symbolizeMemory(MemoryAccess(0x10000000+index, CPUSIZE.BYTE))\n\n # Emulate from the verification function\n solution = self.emulate(0x4006FD)\n self.assertEqual(solution, 'Code_Talkers')",
"def test_deep_circuit(self):\n filename = self._get_resource_path('test_deep.tex')\n qc = QuantumCircuit(1)\n for _ in range(100):\n qc.h(0)\n\n circuit_drawer(qc, filename=filename, output='latex_source')\n\n self.assertEqualToReference(filename)",
"def test_signal_generation(fprime_test_api):\n fprime_test_api.send_and_assert_command(\n \"SG4.SignalGen_Settings\", [1, 5, 0, \"SQUARE\"]\n )\n # First telemetry item should fill only the first slot of the history\n history = [0, 0, 0, 5]\n pair_history = [{\"time\": 0, \"value\": value} for value in history]\n info = {\"type\": \"SQUARE\", \"history\": history, \"pairHistory\": pair_history}\n fprime_test_api.send_and_assert_command(\"SG4.SignalGen_Toggle\")\n fprime_test_api.assert_telemetry(\"SG4.History\", history, timeout=6)\n fprime_test_api.assert_telemetry(\"SG4.PairHistory\", pair_history, timeout=1)\n fprime_test_api.assert_telemetry(\"SG4.Info\", info, timeout=1)\n fprime_test_api.send_and_assert_command(\"SG4.SignalGen_Toggle\")",
"def _test_ic_wire_step(thick_width = 10, thin_width = 1, wire_layer = 2):\n WS4 = Device('test_ic_step')\n wire_stepa = WS4.add_ref(optimal_step(thick_width/2, thin_width/2,\n layer = wire_layer))\n wire_stepb = WS4.add_ref(optimal_step(thin_width/2, thick_width/2,\n layer = wire_layer))\n wire_stepc = WS4.add_ref(optimal_step(thick_width/2, thin_width/2,\n layer = wire_layer))\n wire_stepd = WS4.add_ref(optimal_step(thin_width/2, thick_width/2,\n layer = wire_layer))\n wire_stepb.rotate(180)\n wire_stepb.xmin = wire_stepa.xmin\n wire_stepc.rotate(180)\n wire_stepc.xmin = wire_stepa.xmax\n wire_stepd.xmin = wire_stepc.xmin\n return WS4",
"def test_dummy():\n # ARRANGE\n number = 1\n # ACT\n number += 1\n # ASSERT\n assert number == 2",
"def test_constructor(self, circuit):\n assert list(circuit.wires) == [jet.Wire(i, 0, False) for i in range(4)]\n assert list(circuit.operations) == [jet.Operation(jet.Qubit(), [i]) for i in range(4)]",
"def _assert(condition, message):\n if not condition:\n raise AssertionError(message)",
"def test(self):\n self.build(dictionary={\"CXX_SOURCES\": \"main.cpp\", \"EXE\": \"a.out\"})\n\n exe = self.getBuildArtifact(\"a.out\")\n target = self.dbg.CreateTarget(exe)\n target.BreakpointCreateBySourceRegex(\"return\", lldb.SBFileSpec(\"rebuild.cpp\"))\n target.BreakpointCreateBySourceRegex(\"return\", lldb.SBFileSpec(\"main.cpp\"))\n process = target.LaunchSimple(None, None, self.get_process_working_directory())\n\n self.expect_expr(\n \"foo\",\n result_type=\"Foo\",\n result_children=[ValueCheck(name=\"m_val\", value=\"42\")],\n )\n\n # Delete the executable to force make to rebuild it.\n remove_file(exe)\n self.build(dictionary={\"CXX_SOURCES\": \"rebuild.cpp\", \"EXE\": \"a.out\"})\n\n # Rerun program within the same target\n process.Destroy()\n process = target.LaunchSimple(None, None, self.get_process_working_directory())\n\n self.expect_expr(\n \"foo\",\n result_type=\"Foo\",\n result_children=[\n ValueCheck(\n name=\"Base\", children=[ValueCheck(name=\"m_base_val\", value=\"42\")]\n ),\n ValueCheck(name=\"m_derived_val\", value=\"137\"),\n ],\n )\n\n self.filecheck(\"target module dump ast\", __file__)\n\n # The new definition 'struct Foo' is in the scratch AST\n # CHECK: |-CXXRecordDecl {{.*}} struct Foo definition\n # CHECK: | |-public 'Base'\n # CHECK-NEXT: | `-FieldDecl {{.*}} m_derived_val 'int'\n # CHECK-NEXT: `-CXXRecordDecl {{.*}} struct Base definition\n # CHECK: `-FieldDecl {{.*}} m_base_val 'int'\n\n # ...but the original definition of 'struct Foo' is not in the scratch AST anymore\n # CHECK-NOT: FieldDecl {{.*}} m_val 'int'",
"def test_wires_expval(\n self, device, circuit_factory, wires1, wires2, tol\n ): # pylint: disable=too-many-arguments\n dev1 = device(wires1)\n dev2 = device(wires2)\n\n circuit1 = circuit_factory(dev1, wires1)\n circuit2 = circuit_factory(dev2, wires2)\n\n assert np.allclose(circuit1(), circuit2(), atol=tol(dev1.shots))",
"def test_example():\n x = 0\n y = 1\n assert x != y",
"def add_breakpoint():\n raise NotImplementedError()",
"def assertVariableValue(self, file, a, b):\n file.write(\"ASSERT({} = {});\\n\".format(a, b))\n return",
"def test_simple_assertions2(self):\n kb = logic.PropKB()\n kb.tell(logic.expr('color(cat, coat, black)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('color(cat, coat, black)')),\n [{}])\n kb.tell(logic.expr('age(cat, toy, 35)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('age(cat, toy, 35)')),\n [{}])\n kb.tell(logic.expr('color(cat, mitten, left, black)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('color(cat, mitten, left, black)')),\n [{}])\n kb.tell(logic.expr('age(cat, toy, top, 35)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('age(cat, toy, top, 35)')),\n [{}])\n kb.tell(logic.expr('age(cat, toy, top, x, y, z, 35)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('age(cat, toy, top, x, y, z, 35)')),\n [{}])",
"def visit_AssertStatNode(self, node):\n self.mark_position(node)\n next_block = self.flow.newblock()\n parent = self.flow.block\n # failure case\n parent = self.flow.nextblock(parent)\n self._visit(node.condition)\n self.flow.nextblock()\n self._visit(node.exception)\n if self.flow.block:\n self.flow.block.add_child(next_block)\n parent.add_child(next_block)\n if next_block.parents:\n self.flow.block = next_block\n else:\n self.flow.block = None\n return node",
"def check_trace(self, step_method):\n n_steps = 100\n with Model():\n x = Normal('x', mu=0, sd=1)\n if step_method.__name__ == 'SMC':\n Deterministic('like', - 0.5 * tt.log(2 * np.pi) - 0.5 * x.T.dot(x))\n trace = smc.ATMIP_sample(n_steps=n_steps, step=step_method(random_seed=1),\n n_jobs=1, progressbar=False, stage='0',\n homepath=self.temp_dir)\n else:\n trace = sample(n_steps, step=step_method(), random_seed=1)\n\n print(repr(trace.get_values('x')))\n assert_array_almost_equal(\n trace.get_values('x'),\n self.master_samples[step_method],\n decimal=select_by_precision(float64=6, float32=4))"
] | [
"0.615157",
"0.5664515",
"0.5564676",
"0.51173455",
"0.5042951",
"0.49747887",
"0.49527636",
"0.4948408",
"0.48849374",
"0.48534235",
"0.48316854",
"0.48037466",
"0.4722693",
"0.47221145",
"0.4704494",
"0.47041127",
"0.46748686",
"0.4669761",
"0.46511495",
"0.4641877",
"0.46392068",
"0.4616255",
"0.460738",
"0.46011716",
"0.4594634",
"0.45938447",
"0.45861572",
"0.45852068",
"0.455166",
"0.45421252"
] | 0.6336997 | 0 |
Creates a breakpoint, which is a renamed deep copy of the QuantumCircuit, and creates and appends an AssertUniform instruction to its end. If the statistical test passes, the assertion fails; if the test fails, the assertion passes. | def get_breakpoint_not_uniform(self, qubit, cbit, pcrit=0.05):
clone = self.copy(Asserts._new_breakpoint_name())
assertion = AssertUniform(qubit, cbit, pcrit, True)
clone.append(assertion, [assertion._qubit], [assertion._cbit])
return clone | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_breakpoint_uniform(self, qubit, cbit, pcrit=0.05):\n clone = self.copy(Asserts._new_breakpoint_name())\n assertion = AssertUniform(qubit, cbit, pcrit, False)\n clone.append(assertion, [assertion._qubit], [assertion._cbit])\n return clone",
"def get_breakpoint_product(self, qubit0, cbit0, qubit1, cbit1, pcrit=0.05):\n clone = self.copy(Asserts._new_breakpoint_name())\n assertion = AssertProduct(qubit0, cbit0, qubit1, cbit1, pcrit, False)\n clone.append(assertion, [assertion._qubit], [assertion._cbit])\n return clone",
"def get_breakpoint_not_product(self, qubit0, cbit0, qubit1, cbit1, pcrit=0.05):\n clone = self.copy(Asserts._new_breakpoint_name())\n assertion = AssertProduct(qubit0, cbit0, qubit1, cbit1, pcrit, True)\n clone.append(assertion, [assertion._qubit], [assertion._cbit])\n return clone",
"def test(self):\n self.gdb.command(\"b just_before_write_loop\")\n self.gdb.c()\n write_loop = self.gdb.p(\"&write_loop\")\n self.gdb.command(\"watch data\")\n self.gdb.c()\n # Accept hitting the breakpoint before or after the store instruction.\n assertIn(self.gdb.p(\"$pc\"), [write_loop, write_loop + 4])\n assertEqual(self.gdb.p(\"$a0\"), self.gdb.p(\"&data\"))",
"def do_test(self):\n exe_name = \"a.out\"\n exe = self.getBuildArtifact(exe_name)\n\n # Create the target\n target = self.dbg.CreateTarget(exe)\n self.assertTrue(target, VALID_TARGET)\n\n # Target variables. This is not actually expected to work, but\n # also shouldn't crash.\n g_counter = target.EvaluateExpression(\"g_counter\")\n self.assertTrue(\n g_counter.IsValid(),\n \"g_counter returned a valid value object.\")\n\n # Set the breakpoints\n outer_bkpt = target.BreakpointCreateBySourceRegex(\n 'Set top_level breakpoint here', self.main_source_spec)\n self.assertTrue(outer_bkpt.GetNumLocations() > 0, VALID_BREAKPOINT)\n\n function_bkpt = target.BreakpointCreateBySourceRegex(\n 'Set function breakpoint here', self.main_source_spec)\n self.assertTrue(function_bkpt.GetNumLocations() > 0, VALID_BREAKPOINT)\n\n # Launch the process, and do not stop at the entry point.\n process = target.LaunchSimple(None, None, os.getcwd())\n\n self.assertTrue(process, PROCESS_IS_VALID)\n\n # Frame #0 should be at our breakpoint.\n threads = lldbutil.get_threads_stopped_at_breakpoint(\n process, outer_bkpt)\n\n self.assertTrue(len(threads) == 1)\n self.thread = threads[0]\n\n # All the variables should be uninitialized at this point. Maybe sure\n # they look that way:\n frame = self.thread.frames[0]\n options = lldb.SBExpressionOptions()\n options.SetFetchDynamicValue(lldb.eDynamicCanRunTarget)\n\n error = lldb.SBError()\n\n # Examine the variables before initialization:\n\n g_counter = frame.EvaluateExpression(\"g_counter\", options)\n self.assertTrue(\n g_counter.IsValid(),\n \"g_counter returned a valid value object.\")\n value = g_counter.GetValueAsSigned(error)\n self.assertTrue(error.Success(), \"Got a value for g_counter\")\n self.assertTrue(\n value == 0,\n \"g_counter value is the uninitialized one.\")\n\n foo_var = frame.EvaluateExpression(\"my_foo\", options)\n self.assertTrue(\n foo_var.IsValid(),\n \"foo_var returned a valid value object.\")\n value = foo_var.GetValueAsUnsigned(error)\n self.assertTrue(error.Success(), \"foo_var has a value.\")\n self.assertTrue(value == 0, \"foo_var is null before initialization.\")\n\n my_large_dude = frame.EvaluateExpression(\"my_large_dude\", options)\n self.assertTrue(my_large_dude.IsValid(),\n \"my_large_dude returned a valid value object.\")\n value = my_large_dude.GetValue()\n self.assertTrue(error.Success(), \"Got a value for my_large_dude\")\n self.assertTrue(\n value is None,\n \"my_large_dude value is the uninitialized one.\")\n\n # Now proceed to the breakpoint in our main function, make sure we can\n # still read these variables and they now have the right values.\n threads = lldbutil.continue_to_breakpoint(process, function_bkpt)\n self.assertTrue(len(threads) == 1)\n\n self.thread = threads[0]\n\n # Examine the variables before initialization:\n\n g_counter = frame.EvaluateExpression(\"g_counter\", options)\n self.assertTrue(\n g_counter.IsValid(),\n \"g_counter returned a valid value object.\")\n value = g_counter.GetValueAsSigned(error)\n self.assertTrue(error.Success(), \"Got a value for g_counter\")\n self.assertTrue(value == 2, \"g_counter value should be 2.\")\n\n foo_var = frame.EvaluateExpression(\"my_foo\", options)\n self.assertTrue(\n foo_var.IsValid(),\n \"foo_var returned a valid value object.\")\n foo_var_x = foo_var.GetChildMemberWithName(\"x\")\n self.assertTrue(foo_var_x.IsValid(), \"Got value object for foo_var.x\")\n value = foo_var_x.GetValueAsUnsigned(error)\n self.assertTrue(error.Success(), \"foo_var.x has a value.\")\n 
self.assertTrue(value == 1, \"foo_var is null before initialization.\")\n\n my_large_dude = frame.EvaluateExpression(\"my_large_dude\", options)\n self.assertTrue(my_large_dude.IsValid(),\n \"my_large_dude returned a valid value object.\")\n my_large_dude_y = my_large_dude.GetChildMemberWithName(\"y\")\n self.assertTrue(\n my_large_dude_y.IsValid(),\n \"Got value object for my_large_dude.y\")\n value = my_large_dude_y.GetValueAsUnsigned(error)\n self.assertTrue(error.Success(), \"Got a value for my_large_dude.y\")\n self.assertTrue(\n value == 20,\n \"my_large_dude value is the uninitialized one.\")",
"def test_debugger_api_boundary_condition(self):\n self.dbg.HandleCommand(None)\n self.dbg.SetDefaultArchitecture(None)\n self.dbg.GetScriptingLanguage(None)\n self.dbg.CreateTarget(None)\n self.dbg.CreateTarget(None, None, None, True, lldb.SBError())\n self.dbg.CreateTargetWithFileAndTargetTriple(None, None)\n self.dbg.CreateTargetWithFileAndArch(None, None)\n self.dbg.FindTargetWithFileAndArch(None, None)\n self.dbg.SetInternalVariable(None, None, None)\n self.dbg.GetInternalVariableValue(None, None)\n # FIXME (filcab): We must first allow for the swig bindings to know if\n # a Python callback is set. (Check python-typemaps.swig)\n # self.dbg.SetLoggingCallback(None)\n self.dbg.SetPrompt(None)\n self.dbg.SetCurrentPlatform(None)\n self.dbg.SetCurrentPlatformSDKRoot(None)\n \n fresh_dbg = lldb.SBDebugger()\n self.assertEquals(len(fresh_dbg), 0)",
"def test(self):\n # 0x13 is nop\n self.gdb.command(\"p *((int*) 0x%x)=0x13\" % self.target.ram)\n self.gdb.command(\"p *((int*) 0x%x)=0x13\" % (self.target.ram + 4))\n self.gdb.command(\"p *((int*) 0x%x)=0x13\" % (self.target.ram + 8))\n self.gdb.p(\"$pc=0x%x\" % self.target.ram)\n self.gdb.stepi()\n assertEqual((self.target.ram + 4), self.gdb.p(\"$pc\"))\n self.gdb.stepi()\n assertEqual((self.target.ram + 8), self.gdb.p(\"$pc\"))",
"def test_add_circuit_noname(self):\n q_program = QuantumProgram()\n qr = q_program.create_quantum_register(size=2)\n cr = q_program.create_classical_register(size=2)\n qc1 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])\n qc2 = q_program.create_circuit(qregisters=[qr], cregisters=[cr])\n qc1.h(qr[0])\n qc1.measure(qr[0], cr[0])\n qc2.measure(qr[1], cr[1])\n new_circuit = qc1 + qc2\n q_program.add_circuit(quantum_circuit=new_circuit)\n backend = 'local_qasm_simulator_py' # cpp simulator rejects non string IDs (FIXME)\n shots = 1024\n result = q_program.execute(backend=backend, shots=shots, seed=78)\n counts = result.get_counts(new_circuit.name)\n target = {'00': shots / 2, '01': shots / 2}\n threshold = 0.04 * shots\n self.assertDictAlmostEqual(counts, target, threshold)\n self.assertRaises(QISKitError, result.get_counts)",
"def _analyse_stmt_Assert(self, statement: ast.Assert, *, next: CFNode) -> CFNode:\n test_is_constant, test_value = self._expression_as_constant(statement.test)\n\n branches: Dict[str, CFNode] = {}\n if test_is_constant:\n if test_value:\n branches.update(next=next)\n else:\n branches.update(error=self._raise)\n else:\n branches.update(next=next, error=self._raise)\n\n return self._ast_node(statement, **branches)",
"def test_normal_circuit(self):\n filename = self._get_resource_path('test_normal.tex')\n qc = QuantumCircuit(5)\n for qubit in range(5):\n qc.h(qubit)\n\n circuit_drawer(qc, filename=filename, output='latex_source')\n\n self.assertEqualToReference(filename)",
"def test_analytic_value_with_simple_circuit(self, differentiator, op):\n # Get an expectation op, with this differentiator attached.\n differentiator.refresh()\n op = differentiator.generate_differentiable_op(analytic_op=op)\n qubit = cirq.GridQubit(0, 0)\n circuit = util.convert_to_tensor(\n [cirq.Circuit(cirq.X(qubit)**sympy.Symbol('alpha'))])\n psums = util.convert_to_tensor([[cirq.Z(qubit)]])\n symbol_values_array = np.array([[0.123]], dtype=np.float32)\n # Calculate tfq gradient.\n symbol_values_tensor = tf.convert_to_tensor(symbol_values_array)\n with tf.GradientTape() as g:\n g.watch(symbol_values_tensor)\n expectations = op(circuit, tf.convert_to_tensor(['alpha']),\n symbol_values_tensor, psums)\n grads = g.gradient(expectations, symbol_values_tensor)\n ground_truth_grads = np.array([[-1.1839752]])\n self.assertAllClose(ground_truth_grads, grads, rtol=1e-2, atol=1e-2)",
"def test_issue14355(self):\n\n def make_diagram():\n # Use a nested function to ensure that all locals get garbage\n # collected quickly.\n\n # Construct a trivial plant and ID controller.\n # N.B. We explicitly do *not* add this plant to the diagram.\n controller_plant = MultibodyPlant(time_step=0.002)\n controller_plant.Finalize()\n builder = DiagramBuilder()\n controller = builder.AddSystem(\n InverseDynamicsController(\n controller_plant,\n kp=[],\n ki=[],\n kd=[],\n has_reference_acceleration=False,\n )\n )\n # Forward ports for ease of testing.\n builder.ExportInput(\n controller.get_input_port_estimated_state(), \"x_estimated\")\n builder.ExportInput(\n controller.get_input_port_desired_state(), \"x_desired\")\n builder.ExportOutput(controller.get_output_port_control(), \"u\")\n diagram = builder.Build()\n return diagram\n\n diagram = make_diagram()\n # N.B. Without the workaround for #14355, we get a segfault when\n # creating the context.\n context = diagram.CreateDefaultContext()\n diagram.GetInputPort(\"x_estimated\").FixValue(context, [])\n diagram.GetInputPort(\"x_desired\").FixValue(context, [])\n u = diagram.GetOutputPort(\"u\").Eval(context)\n np.testing.assert_equal(u, [])",
"def test(self):\n self.gdb.b(\"main:start\")\n self.gdb.c()\n self.gdb.command(\"p i=0\")\n last_pc = None\n advances = 0\n jumps = 0\n for _ in range(100):\n self.gdb.stepi()\n pc = self.gdb.p(\"$pc\")\n assertNotEqual(last_pc, pc)\n if last_pc and pc > last_pc and pc - last_pc <= 4:\n advances += 1\n else:\n jumps += 1\n last_pc = pc\n # Some basic sanity that we're not running between breakpoints or\n # something.\n assertGreater(jumps, 10)\n assertGreater(advances, 50)",
"def test_circuit_init(self):\n circuit, target = self.simple_circuit_no_measure()\n op = Chi(circuit)\n target = Chi(target)\n self.assertEqual(op, target)",
"def test_orthorhombic_sims(cell_dimensions, crystal_params):\n # Multiple of 6 works nicely with the p2 crystal\n cell_dimensions = cell_dimensions * 6\n with crystal_params.temp_context(cell_dimensions=cell_dimensions):\n snapshot = init_from_crystal(crystal_params)\n snapshot = equilibrate(snapshot, crystal_params, equil_type=\"crystal\")\n snapshot = make_orthorhombic(snapshot)\n temp_context = hoomd.context.initialize(crystal_params.hoomd_args)\n production(snapshot, temp_context, crystal_params, dynamics=False)",
"def test_defcamp_2015(self):\n # Load the binary\n binary_file = os.path.join(os.path.dirname(__file__), \"misc\", \"defcamp-2015-r100.bin\")\n self.load_binary(binary_file)\n\n # Define a fake stack\n self.Triton.setConcreteRegisterValue(self.Triton.registers.rbp, 0x7fffffff)\n self.Triton.setConcreteRegisterValue(self.Triton.registers.rsp, 0x6fffffff)\n\n # Define an user input\n self.Triton.setConcreteRegisterValue(self.Triton.registers.rdi, 0x10000000)\n\n # Symbolize user inputs (30 bytes)\n for index in range(30):\n self.Triton.symbolizeMemory(MemoryAccess(0x10000000+index, CPUSIZE.BYTE))\n\n # Emulate from the verification function\n solution = self.emulate(0x4006FD)\n self.assertEqual(solution, 'Code_Talkers')",
"def test_deep_circuit(self):\n filename = self._get_resource_path('test_deep.tex')\n qc = QuantumCircuit(1)\n for _ in range(100):\n qc.h(0)\n\n circuit_drawer(qc, filename=filename, output='latex_source')\n\n self.assertEqualToReference(filename)",
"def _test_ic_wire_step(thick_width = 10, thin_width = 1, wire_layer = 2):\n WS4 = Device('test_ic_step')\n wire_stepa = WS4.add_ref(optimal_step(thick_width/2, thin_width/2,\n layer = wire_layer))\n wire_stepb = WS4.add_ref(optimal_step(thin_width/2, thick_width/2,\n layer = wire_layer))\n wire_stepc = WS4.add_ref(optimal_step(thick_width/2, thin_width/2,\n layer = wire_layer))\n wire_stepd = WS4.add_ref(optimal_step(thin_width/2, thick_width/2,\n layer = wire_layer))\n wire_stepb.rotate(180)\n wire_stepb.xmin = wire_stepa.xmin\n wire_stepc.rotate(180)\n wire_stepc.xmin = wire_stepa.xmax\n wire_stepd.xmin = wire_stepc.xmin\n return WS4",
"def add_breakpoint():\n raise NotImplementedError()",
"def test_constructor(self, circuit):\n assert list(circuit.wires) == [jet.Wire(i, 0, False) for i in range(4)]\n assert list(circuit.operations) == [jet.Operation(jet.Qubit(), [i]) for i in range(4)]",
"def test_signal_generation(fprime_test_api):\n fprime_test_api.send_and_assert_command(\n \"SG4.SignalGen_Settings\", [1, 5, 0, \"SQUARE\"]\n )\n # First telemetry item should fill only the first slot of the history\n history = [0, 0, 0, 5]\n pair_history = [{\"time\": 0, \"value\": value} for value in history]\n info = {\"type\": \"SQUARE\", \"history\": history, \"pairHistory\": pair_history}\n fprime_test_api.send_and_assert_command(\"SG4.SignalGen_Toggle\")\n fprime_test_api.assert_telemetry(\"SG4.History\", history, timeout=6)\n fprime_test_api.assert_telemetry(\"SG4.PairHistory\", pair_history, timeout=1)\n fprime_test_api.assert_telemetry(\"SG4.Info\", info, timeout=1)\n fprime_test_api.send_and_assert_command(\"SG4.SignalGen_Toggle\")",
"def test(self):\n self.build(dictionary={\"CXX_SOURCES\": \"main.cpp\", \"EXE\": \"a.out\"})\n\n exe = self.getBuildArtifact(\"a.out\")\n target = self.dbg.CreateTarget(exe)\n target.BreakpointCreateBySourceRegex(\"return\", lldb.SBFileSpec(\"rebuild.cpp\"))\n target.BreakpointCreateBySourceRegex(\"return\", lldb.SBFileSpec(\"main.cpp\"))\n process = target.LaunchSimple(None, None, self.get_process_working_directory())\n\n self.expect_expr(\n \"foo\",\n result_type=\"Foo\",\n result_children=[ValueCheck(name=\"m_val\", value=\"42\")],\n )\n\n # Delete the executable to force make to rebuild it.\n remove_file(exe)\n self.build(dictionary={\"CXX_SOURCES\": \"rebuild.cpp\", \"EXE\": \"a.out\"})\n\n # Rerun program within the same target\n process.Destroy()\n process = target.LaunchSimple(None, None, self.get_process_working_directory())\n\n self.expect_expr(\n \"foo\",\n result_type=\"Foo\",\n result_children=[\n ValueCheck(\n name=\"Base\", children=[ValueCheck(name=\"m_base_val\", value=\"42\")]\n ),\n ValueCheck(name=\"m_derived_val\", value=\"137\"),\n ],\n )\n\n self.filecheck(\"target module dump ast\", __file__)\n\n # The new definition 'struct Foo' is in the scratch AST\n # CHECK: |-CXXRecordDecl {{.*}} struct Foo definition\n # CHECK: | |-public 'Base'\n # CHECK-NEXT: | `-FieldDecl {{.*}} m_derived_val 'int'\n # CHECK-NEXT: `-CXXRecordDecl {{.*}} struct Base definition\n # CHECK: `-FieldDecl {{.*}} m_base_val 'int'\n\n # ...but the original definition of 'struct Foo' is not in the scratch AST anymore\n # CHECK-NOT: FieldDecl {{.*}} m_val 'int'",
"def test_dummy():\n # ARRANGE\n number = 1\n # ACT\n number += 1\n # ASSERT\n assert number == 2",
"def test_wires_expval(\n self, device, circuit_factory, wires1, wires2, tol\n ): # pylint: disable=too-many-arguments\n dev1 = device(wires1)\n dev2 = device(wires2)\n\n circuit1 = circuit_factory(dev1, wires1)\n circuit2 = circuit_factory(dev2, wires2)\n\n assert np.allclose(circuit1(), circuit2(), atol=tol(dev1.shots))",
"def _assert(condition, message):\n if not condition:\n raise AssertionError(message)",
"def test_operator_with_invalid_wire(self, monkeypatch, test_batch_result):\n dev = QeQiskitDevice(\n wires=[\"a\", \"b\", \"c\"], shots=1000, backend=\"qasm_simulator\", analytic=False\n )\n\n with monkeypatch.context() as m:\n m.setattr(pennylane_orquestra.cli_actions, \"user_data_dir\", lambda *args: tmpdir)\n\n # Disable submitting to the Orquestra platform by mocking Popen\n m.setattr(subprocess, \"Popen\", lambda *args, **kwargs: MockPopen())\n m.setattr(\n pennylane_orquestra.orquestra_device,\n \"loop_until_finished\",\n lambda *args, **kwargs: test_batch_result,\n )\n\n @qml.qnode(dev)\n def circuit():\n return qml.expval(qml.PauliZ(0))\n\n with pytest.raises(\n qml.qnodes.base.QuantumFunctionError,\n match=\"Operation PauliZ applied to invalid wire\",\n ):\n circuit()",
"def test_example():\n x = 0\n y = 1\n assert x != y",
"def test_simple_assertions2(self):\n kb = logic.PropKB()\n kb.tell(logic.expr('color(cat, coat, black)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('color(cat, coat, black)')),\n [{}])\n kb.tell(logic.expr('age(cat, toy, 35)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('age(cat, toy, 35)')),\n [{}])\n kb.tell(logic.expr('color(cat, mitten, left, black)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('color(cat, mitten, left, black)')),\n [{}])\n kb.tell(logic.expr('age(cat, toy, top, 35)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('age(cat, toy, top, 35)')),\n [{}])\n kb.tell(logic.expr('age(cat, toy, top, x, y, z, 35)'))\n self.assertAllBindingsEqual(\n kb.ask_all(logic.expr('age(cat, toy, top, x, y, z, 35)')),\n [{}])",
"def assertVariableValue(self, file, a, b):\n file.write(\"ASSERT({} = {});\\n\".format(a, b))\n return",
"def check_trace(self, step_method):\n n_steps = 100\n with Model():\n x = Normal('x', mu=0, sd=1)\n if step_method.__name__ == 'SMC':\n Deterministic('like', - 0.5 * tt.log(2 * np.pi) - 0.5 * x.T.dot(x))\n trace = smc.ATMIP_sample(n_steps=n_steps, step=step_method(random_seed=1),\n n_jobs=1, progressbar=False, stage='0',\n homepath=self.temp_dir)\n else:\n trace = sample(n_steps, step=step_method(), random_seed=1)\n\n print(repr(trace.get_values('x')))\n assert_array_almost_equal(\n trace.get_values('x'),\n self.master_samples[step_method],\n decimal=select_by_precision(float64=6, float32=4))"
] | [
"0.6328285",
"0.5668417",
"0.5596492",
"0.5110465",
"0.50538695",
"0.5012597",
"0.49447802",
"0.49177045",
"0.49040687",
"0.48579818",
"0.48148197",
"0.481229",
"0.47325182",
"0.47238013",
"0.47059774",
"0.46966594",
"0.46892297",
"0.46688747",
"0.46467826",
"0.4646354",
"0.4643045",
"0.46213526",
"0.46178538",
"0.4602418",
"0.4592822",
"0.45796886",
"0.4565457",
"0.4557773",
"0.4538857",
"0.45243722"
] | 0.6174962 | 1 |
Geotransform the original map and create an inverse geotransform for the raster. | def _get_inv_gt(self):
# Geotransform the original map
self.in_gt = self.in_ds.GetGeoTransform()
# Create an inverse geotransform for the raster.
# This converts real-world coordinates to pixel offsets.
self.inv_gt = gdal.InvGeoTransform(self.in_gt)
if gdal.VersionInfo()[0] == '1':
if self.inv_gt[0] == 1:
self.inv_gt = self.inv_gt[1]
else:
raise RuntimeError('Inverse geotransform failed')
elif self.inv_gt is None:
raise RuntimeError('Inverse geotransform failed') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def geo_transform(self):\n pass",
"def geotransform(self):\n return self._geotransform",
"def __affine_geo_transformation(x, y, gtr):\n\n # https://gdal.org/user/raster_data_model.html#affine-geotransform\n # Affine transformation rewritten for rasterio:\n gtr_x = gtr[2] + (x + 0.5) * gtr[0] + (y + 0.5) * gtr[1]\n gtr_y = gtr[5] + (x + 0.5) * gtr[3] + (y + 0.5) * gtr[4]\n\n return gtr_x, gtr_y",
"def apply_inverse_map(self, transport_map, sig0):\n # Check input arrays\n transport_map = check_array(transport_map, ndim=2,\n dtype=[np.float64, np.float32])\n sig0 = check_array(sig0, ndim=2, dtype=[np.float64, np.float32],\n force_strictly_positive=True)\n\n # Initialize Radon transforms\n rad0 = radon(sig0, theta=self.theta, circle=False)\n rad1 = np.zeros_like(rad0)\n\n # Check transport map and Radon transforms are the same size\n assert_equal_shape(transport_map, rad0,\n ['transport_map', 'Radon transform of sig0'])\n\n # Loop over angles\n cdt = CDT()\n for i in range(self.theta.size):\n # Convert projection to PDF\n j0 = signal_to_pdf(rad0[:,i], epsilon=1e-8, total=1.)\n\n # Radon transform of sig1 comprised of inverse CDT of projections\n rad1[:,i] = cdt.apply_inverse_map(transport_map[:,i], j0)\n\n # Inverse Radon transform\n sig1_recon = iradon(rad1, self.theta, circle=False, filter='ramp')\n\n # Crop sig1_recon to match sig0\n sig1_recon = match_shape2d(sig0, sig1_recon)\n\n return sig1_recon",
"def inv_projmap(self, img, nside=None):\n pass",
"def Reproject(cls,inRaster, outRaster,EPSG):\r\n try:\r\n import gdal\r\n except:\r\n raise ImportError(\"Can not import module GDAL\")\r\n try:\r\n dataset = gdal.Open(infile)\r\n out = dataset.GetRasterBand(1)\r\n print dataset.GetMetadata()\r\n return out\r\n except:\r\n raise ImportError(\"Can not read band\")\r\n #if not tmp:\r\n # raise Exception(\"Could not orthorectify the image \")\r\n\r\n com=\" \".join([\"gdalwarp -t_srs EPSG:\"+str(EPSG),InRaster,OutRaster])\r\n tmp=os.system(com)\r\n print tmp\r\n #if not tmp:\r\n # raise Exception(\"Could not reproject the image \")\r\n #os.remove(OutRaster)\r",
"def reproject(self, lon, lat):\n if self.xform is None:\n # if the CRS hasn't been determined yet, we set it from the first image's lat/lon (take the UTM crs)\n utm_i = str(int(math.floor((self.images[0].lon + 180) / 6 ) % 60) + 1).zfill(2)\n epsg_code = int('326' + utm_i) if (self.images[0].lat >= 0) else int('327' + utm_i)\n self.crs_dest = QgsCoordinateReferenceSystem(epsg_code)\n self.xform = QgsCoordinateTransform(self.crs_src, self.crs_dest, QgsProject.instance())\n return self.xform.transform(QgsPointXY(lon, lat))",
"def _build_geotransform(self, i, j):\n assert isinstance(i, int), (\"i is not an integer\")\n assert isinstance(j, int), (\"j is not an integer\")\n x_origin, x_res, x_ignore, y_origin, y_ignore, y_res = (\n self.image_metadata.geotransform)\n # integer conversion to reduce floating point error\n new_x_origin = self._calculate_origin(x_origin, x_res, self.offset, j)\n new_y_origin = self._calculate_origin(y_origin, y_res, self.offset, i)\n geotransform = (new_x_origin, x_res, x_ignore, new_y_origin, \n y_ignore, y_res) \n return geotransform",
"def xtransformed(geo, transformation):\n T = xform_from_transformation(transformation)\n geo_copy = geo.Duplicate()\n geo_copy.Transform(T)\n return geo_copy",
"def test_30_supergeom_translate(self):\n proj = 'CAR'\n ra0, dec0 = CRVAL\n res = 0.01 * DEG\n wcs = coords.get_wcs_kernel(proj, ra0, dec0, res)\n\n wcs.wcs.crpix = (60, 70)\n map0 = enmap.zeros((100,200), wcs=wcs)\n map0[2, 3] = 10.\n map0[90, 192] = 11.\n\n # Extracts.\n m1 = map0[:10,:10]\n m2 = map0[-10:,-10:]\n\n # In simple cylindrical projections, there's a degeneracy\n # between crval and crpix in the longitude component -- crval\n # can be anywhere on the equator. It is useful to be able to\n # join maps even if they have different crval[0], provided the\n # pixel centers line up. (The same is not true of crval[1],\n # which tips the native equator relative to the celestial\n # equator.)\n\n for axis, should_work in [(0, True), (1, False)]:\n dpix = 10.5\n m2 = map0[-10:,-10:]\n m2.wcs.wcs.crpix[axis] += dpix\n m2.wcs.wcs.crval[axis] += dpix * m2.wcs.wcs.cdelt[axis]\n\n if should_work:\n sg = coords.get_supergeom((m1.shape, m1.wcs), (m2.shape, m2.wcs))\n mapx = enmap.zeros(*sg)\n mapx.insert(m1)\n mapx.insert(m2)\n self.assertTupleEqual(map0.shape, mapx.shape,\n msg=\"Reconstructed map shape.\")\n self.assertTrue(np.all(mapx==map0),\n msg=\"Reconstructed map data.\")\n\n else:\n msg = \"Translating crval in dec should cause \"\\\n \"coord consistency check failure.\"\n with self.assertRaises(ValueError, msg=msg):\n sg = coords.get_supergeom((m1.shape, m1.wcs), (m2.shape, m2.wcs))",
"def reproject_GeoGrid(geogrid_in, srs_string,\n out_xdim=None, out_ydim=None, out_geotransform=None,\n out_nodata_value=None, interp_method=None):\n src = geogrid_as_gdalInMem(geogrid_in)\n\n out_srs = osr.SpatialReference()\n assign_projection_to_srs(out_srs, srs_string)\n out_wkt = out_srs.ExportToWkt()\n\n dst_gdal_datatype = get_gdal_datatype(geogrid_in.data_array.dtype)\n\n try:\n dst = gdal.GetDriverByName('MEM').Create(\n '',\n out_xdim,\n out_ydim,\n 1,\n dst_gdal_datatype,\n )\n dst.SetGeoTransform(out_geotransform)\n dst.SetProjection(out_wkt)\n except ValueError:\n raise ValueError('Error creating dst in reproject_GeoGrid()')\n except AttributeError:\n raise ValueError('AttributeError in dst creation')\n\n\n gdal_interp_method = getGdalInterpMethod(interp_method)\n res = gdal.ReprojectImage(src,\n dst,\n src.GetProjection(),\n dst.GetProjection(),\n gdal_interp_method,\n )\n\n\n return geogrid_from_gdalInMem(dst)",
"def __init__(self, raster_path):\n self.raster_path = raster_path\n dataset = gdal.Open(raster_path)\n self.width = dataset.RasterXSize\n self.height = dataset.RasterYSize\n # Gets the gdal geo transformation tuples\n # gdal_version = gdal.__version__\n self._txf = dataset.GetGeoTransform()\n # self._inv_txf = gdal.InvGeoTransform(self._txf)[1]\n self._inv_txf = gdal.InvGeoTransform(self._txf)\n # Gets the transformation from lat/lon to coordinates\n wgs84_ref = osr.SpatialReference()\n wgs84_ref.ImportFromEPSG(4326) # WGS84\n sref = osr.SpatialReference()\n sref.ImportFromWkt(dataset.GetProjection())\n if int(osgeo.__version__[0]) >= 3:\n # Output order has changed in osgeo v3\n wgs84_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)\n sref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)\n\n self._transform = osr.CoordinateTransformation(wgs84_ref, sref)\n inv_transform = osr.CoordinateTransformation(sref, wgs84_ref)\n # Find a loose lat/lon bounding box for quick check without\n # having to do full coordinates transformation\n corners = []\n for x in [0, self.width]:\n for y in [0, self.height]:\n corners.append([self._txf[0] + self._txf[1] * x + self._txf[2] * y,\n self._txf[3] + self._txf[4] * x + self._txf[5] * y])\n self.max_lat = -100\n self.min_lat = 100\n self.max_lon = -500\n self.min_lon = 500\n for c in corners:\n p = inv_transform.TransformPoint(c[0], c[1])\n if p[0] > self.max_lon:\n self.max_lon = p[0]\n if p[0] < self.min_lon:\n self.min_lon = p[0]\n if p[1] > self.max_lat:\n self.max_lat = p[1]\n if p[1] < self.min_lat:\n self.min_lat = p[1]\n dataset = None",
"def _normalize_affine_map(\n self, affine_map: _ir.AffineMap, with_dims: bool = True\n ) -> _ir.AffineMap:\n with self.context:\n return _ir.AffineMap.get(\n dim_count=self.affine_state.dim_count if with_dims else 0,\n symbol_count=self.affine_state.symbol_count,\n exprs=list(affine_map.results),\n )",
"def reprojectAndSaveNewRaster(inFilepath,outFilepath,to_EPSG):\r\n from osgeo import gdal\r\n input_raster = gdal.Open(inFilepath)\r\n EPSG_string = \"EPSG:\"+str(to_EPSG)\r\n ras = gdal.Warp(outFilepath,input_raster,dstSRS=EPSG_string)\r\n del ras",
"def Reproject(x, y, in_grid = 4326, out_grid = 32737):\n \n inProj = Proj(init='epsg:'+str(in_grid))\n outProj = Proj(init='epsg:'+str(out_grid))\n \n \n x2,y2 = transform(inProj,outProj,x,y)\n \n return x2, y2",
"def geotransform(self):\n return self.dataset.GetGeoTransform() if self.dataset else None",
"def GetGeoTransform(raster_path):\n \n #open a GDAL object containig the raster\n gdal_img = gdal.Open(raster_path)\n \n #extract basic geospatial data\n ulx, xres, xskew, uly, yskew, yres = gdal_img.GetGeoTransform()\n \n #calculate lower right coordinates from upper left coordinates and raster size\n lrx = ulx + (gdal_img.RasterXSize * xres)\n lry = uly + (gdal_img.RasterYSize * yres)\n \n geoinfo = {'ulx': ulx,\n 'lrx': lrx,\n 'uly': uly,\n 'lry': lry,\n 'xres': xres,\n 'xskew': xskew,\n 'yres': yres,\n 'yskew': yskew\n }\n \n return geoinfo",
"def inverse_transform(self, X):\n X = super(PowerTransformer, self).inverse_transform(X)\n\n if self.pre_center:\n X = self.pre_centerer_.inverse_transform(X)\n\n if self.rescale:\n X = self.rescaler_.inverse_transform(X)\n\n return X",
"def _getGeoTransform(self):\n with self._getDatasetLock:\n gt = self.dataset.GetGeoTransform()\n if (self.dataset.GetGCPProjection() and self.dataset.GetGCPs()):\n gt = gdal.GCPsToGeoTransform(self.dataset.GetGCPs())\n return gt",
"def inverse_transform(self, X, copy=...):\n ...",
"def reprojectRaster(in_raster,model_raster,out_dir,name_override = None):\n\tif name_override:\n\t\tout_name = os.path.join(out_dir,name_override)\n\telse:\n\t\tin_base,in_ext = os.path.splitext(os.path.basename(in_raster))\n\t\tout_name = os.path.join(out_dir,in_base+\"_REPROJ\"+in_ext)\n\t\t#print(out_name)\n\tt_ds = gdal.Open(model_raster,0)\n\tt_wkt = t_ds.GetProjection()\n\ts_ds = gdal.Open(in_raster,0)\n\ts_wkt = s_ds.GetProjection()\n\tt_ds = s_ds = None\n\tif s_wkt != t_wkt:\n\t\treproj_args = [r\"C:\\OSGeo4W64\\bin\\gdalwarp.exe\",\"-t_srs\",t_wkt,in_raster,out_name]\n\t\tsubprocess.call(reproj_args)\n\t\treturn out_name\n\telse:\n\t\tlog.warning(\"Projections already match, doing nothing.\")\n\t\treturn in_raster",
"def _apply_transform(self, img: np.ndarray): \n img = self.transform(image=img)[\"image\"]\n return img",
"def _apply_transform(self, img: np.ndarray): \n img = self.transform(image=img)[\"image\"]\n return img",
"def inverse_transform(self, X):\n ...",
"def inverse_transform(self, X):\n ...",
"def inverse_transform(self, X):\n ...",
"def inverse_transform(self, X):\n ...",
"def inverse_transform(self, X):\n ...",
"def img_map_transforms(ts):\n # XXX TODO: unchecked textures give error of variable referenced before assignment XXX\n # POV-Ray \"scale\" is not a number of repetitions factor, but ,its\n # inverse, a standard scale factor.\n # 0.5 Offset is needed relatively to scale because center of the\n # scale is 0.5,0.5 in blender and 0,0 in POV\n # Strange that the translation factor for scale is not the same as for\n # translate.\n # TODO: verify both matches with other blender renderers / internal in previous versions.\n image_map_transforms = \"\"\n image_map_transforms = \"scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>\" % (\n ts.scale[0],\n ts.scale[1],\n ts.scale[2],\n ts.offset[0],\n ts.offset[1],\n ts.offset[2],\n )\n # image_map_transforms = (\" translate <-0.5,-0.5,0.0> scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>\" % \\\n # ( 1.0 / ts.scale.x,\n # 1.0 / ts.scale.y,\n # 1.0 / ts.scale.z,\n # (0.5 / ts.scale.x) + ts.offset.x,\n # (0.5 / ts.scale.y) + ts.offset.y,\n # ts.offset.z))\n # image_map_transforms = (\n # \"translate <-0.5,-0.5,0> \"\n # \"scale <-1,-1,1> * <%.4g,%.4g,%.4g> \"\n # \"translate <0.5,0.5,0> + <%.4g,%.4g,%.4g>\" % \\\n # (1.0 / ts.scale.x,\n # 1.0 / ts.scale.y,\n # 1.0 / ts.scale.z,\n # ts.offset.x,\n # ts.offset.y,\n # ts.offset.z)\n # )\n return image_map_transforms",
"def _pixel_to_map(coordinates, geotransform):\n coordinates_map = np.empty(coordinates.shape)\n coordinates_map[..., 0] = (\n geotransform[0]\n + geotransform[1] * coordinates[..., 0]\n + geotransform[2] * coordinates[..., 1]\n )\n coordinates_map[..., 1] = (\n geotransform[3]\n + geotransform[4] * coordinates[..., 0]\n + geotransform[5] * coordinates[..., 1]\n )\n return coordinates_map"
] | [
"0.7338185",
"0.65408427",
"0.6371385",
"0.63351965",
"0.63224435",
"0.62674654",
"0.62644494",
"0.62108505",
"0.6176385",
"0.61081815",
"0.6096801",
"0.6054763",
"0.604431",
"0.5978923",
"0.59416425",
"0.5915392",
"0.5910879",
"0.58931345",
"0.58687687",
"0.58653194",
"0.5811997",
"0.5783999",
"0.5783999",
"0.5728489",
"0.5728489",
"0.5728489",
"0.5728489",
"0.5728489",
"0.57092243",
"0.568975"
] | 0.67420596 | 1 |
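A minimal usage sketch of the round-trip behind _get_inv_gt above, assuming GDAL >= 2 and a hypothetical raster path ('input.tif' is illustrative):

from osgeo import gdal

ds = gdal.Open('input.tif')          # hypothetical input raster
gt = ds.GetGeoTransform()            # forward: pixel offsets -> map coordinates
inv_gt = gdal.InvGeoTransform(gt)    # inverse: map coordinates -> pixel offsets
if inv_gt is None:                   # GDAL >= 2 signals failure with None
    raise RuntimeError('Inverse geotransform failed')

map_x, map_y = gdal.ApplyGeoTransform(gt, 0, 0)  # map coordinates of pixel (0, 0)
px, py = map(int, gdal.ApplyGeoTransform(inv_gt, map_x, map_y))  # back to (0, 0)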
Get the clip location in the array. | def _get_clip_loc_in_array(self):
# coordinates of upperleft and lowerright points of binding box
box_ulx, box_uly, box_lrx, box_lry = self.clip_box[0][0], self.clip_box[0][1], \
self.clip_box[1][0], self.clip_box[1][1]
# Get the offsets that correspond to the bounding box corner coordinates.
offsets_ul = gdal.ApplyGeoTransform(self.inv_gt, box_ulx, box_uly)
offsets_lr = gdal.ApplyGeoTransform(self.inv_gt, box_lrx, box_lry)
# The offsets are returned as floating point, but we need integers.
self.off_ulx, self.off_uly = map(int, offsets_ul)
self.off_lrx, self.off_lry = map(int, offsets_lr)
# Compute the numbers of rows and columns to extract, based on the offsets.
self.row = self.off_lry - self.off_uly
self.column = self.off_lrx - self.off_ulx | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clip(self):\r\n\t\treturn self._clip",
"def get_marker_absolute_pos(marker_position, clip):\n marker_x = (marker_position[\"x\"] * clip.w) - marker_image.w / 2\n marker_y = (marker_position[\"y\"] * clip.h) - marker_image.h / 2\n return marker_x, marker_y",
"def location_of(self, index):\n img_w, img_h = self.conv_dims()\n \n x = (index % img_w) * self.stride\n y = (index // img_w) * self.stride\n \n return x, y",
"def get_pix_pos(self):\r\n return vec((self.grid_pos[0]*self.app.cell_width)+TOP_BOTTOM_BUFFER//2+self.app.cell_width//2,\r\n (self.grid_pos[1]*self.app.cell_height) +\r\n TOP_BOTTOM_BUFFER//2+self.app.cell_height//2)\r\n # where Pac-Man starts relative to the board\r",
"def get_roi_coords(self, roi):\n if self.is_4d():\n data = self._data[..., self._time_point]\n else:\n data = self._data\n coord = (data==roi).nonzero()\n #return (data==roi).nonzero()\n return (coord[1], self._y_shift - coord[0], coord[2])",
"def get_pos(self, frame):\n frame = self.perspective_shift(frame)\n \n puck_mask = self.color_mask(frame, self.color_green, thresh=15)\n striker_mask = self.color_mask(frame, self.color_orange, thresh=25, blur=5)\n \n puck_loc, _ = self.find_centroids(puck_mask)\n striker_locs, _ = self.find_centroids(striker_mask, 2)\n \n p_pos = self.abs_to_meter(puck_loc[0])\n # cases: (pos,pos), (pos,None), (None,None)\n if striker_locs[0] is not None:\n pos_1 = self.abs_to_meter(striker_locs[0])\n pos_2 = self.abs_to_meter(striker_locs[1])\n s1_pos = pos_1 if pos_1[1]<0 else pos_2\n s2_pos = pos_2 if pos_1[1]<0 else pos_1\n else:\n s1_pos, s2_pos = None, None \n \n return [p_pos, s1_pos, s2_pos]",
"def player_location(self):\n x = 0\n y = 0\n for line in self.grid:\n for i in line:\n if i == \"P\":\n return x, y\n \n y+=1\n x += 1\n y = 0",
"def pos(self):\n x = (self.ec._win._mouse_x -\n self.ec._win.width / 2.) / (self.ec._win.width / 2.)\n y = (self.ec._win._mouse_y -\n self.ec._win.height / 2.) / (self.ec._win.height / 2.)\n return np.array([x, y])",
"def getPosicion(self):\r\n\t\treturn [self._x, self._y]",
"def coordinates(self, mask):\n y,x = mask.nonzero()\n return list(zip(x,y))",
"def get_xy(self):\r\n return self.board.get_xy()",
"def _getCoords(self):\n\n if self._coords is not None:\n return self._coords[self._acsi]",
"def __getitem__(self, i):\n return self.__points[i]",
"def getPosition(self):\n\t\txxx1 = self.stokes()\n\t\txxx2 = self.thp()\n\t\txxx3 = self.tthp()\n\t\treturn [xxx1, xxx2, xxx3]",
"def __get_position(self, value, state):\n coords = np.argwhere(state == value).flatten()\n return coords",
"def chunk_array_position(self, x, y, z):\n return y * 256 + z * 16 + x",
"def trace(self, coord01: np.ndarray) -> np.ndarray:\n rect = self.clip_rect()\n return (rect.position + coord01 * rect.size).astype(np.int)",
"def getClipData(self, x, y, t0, t1):\n it0 = (numpy.abs(x - t0)).argmin()\n it1 = (numpy.abs(x - t1)).argmin()\n if it0 > it1:\n t = it1\n it1 = it0\n it0 = t\n return (x[it0:it1], y[it0:it1])",
"def clipping(self):\n\n return self._clipping",
"def get_image_xy(self, idx, wave_obj):\n\n wave_data = self.table[idx][\"wavelength\"]\n trace_data = self.table[idx][\"trace\"]\n\n y = int(np.round(np.interp(wave_obj, wave_data, range(len(wave_data)))))\n x = int(np.round(np.interp(y, range(len(trace_data)), trace_data)))\n return x, y",
"def get_image_xy(self, idx, wave_obj):\n\n wave_data = self.table[idx][\"wavelength\"]\n trace_data = self.table[idx][\"trace\"]\n\n y = int(np.round(np.interp(wave_obj, wave_data, range(len(wave_data)))))\n x = int(np.round(np.interp(y, range(len(trace_data)), trace_data)))\n return x, y",
"def get_piece(self, index):\n return self.squares[index]",
"def getCoord(self, i):\n _x = self.__xpts[i]\n _y = self.__ypts[i]\n return _x, _y",
"def get_position(self, position):",
"def ndarray_to_location(array: np.ndarray) -> carla.Location: # pylint: disable=no-member\n return carla.Location(*list(map(float, array))) # pylint: disable=no-member",
"def getCoords( self, i : int ):\n return enumerate(self._Vals[self._layout.dims_order[i]] \\\n [self._layout.starts[i]:self._layout.ends[i]])",
"def get_pos_in_pixels(self):\n pixelpos = Vector(self.pos.x * 32, -self.pos.y * 32)\n return pixelpos + self.offset",
"def get_pixel_pos(self):\n\n c = self.get_center()\n\n return Tank.three_by_three(c[0],c[1])",
"def getPosition(self):\n return self.ray.position",
"def chr_coords(s):\n return max_y - (max_y - min_y)*s"
] | [
"0.62954205",
"0.61157066",
"0.5843126",
"0.5814552",
"0.57981294",
"0.5748439",
"0.5678269",
"0.5570744",
"0.55497",
"0.5545587",
"0.55285585",
"0.55121255",
"0.5457112",
"0.544028",
"0.5431783",
"0.5430645",
"0.54257566",
"0.5412553",
"0.54099363",
"0.5408648",
"0.5408648",
"0.5405489",
"0.5403393",
"0.5379675",
"0.5374996",
"0.53748864",
"0.5365419",
"0.5363963",
"0.53600216",
"0.53466356"
] | 0.755718 | 0 |
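A numeric sketch of the offset arithmetic in _get_clip_loc_in_array; the geotransform and corner coordinates below are made-up values:

from osgeo import gdal

gt = (440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)  # hypothetical 60 m geotransform
inv_gt = gdal.InvGeoTransform(gt)

ulx, uly = 441320.0, 3750720.0   # upper-left corner of the clip box (map units)
lrx, lry = 443720.0, 3748320.0   # lower-right corner of the clip box (map units)
off_ulx, off_uly = map(int, gdal.ApplyGeoTransform(inv_gt, ulx, uly))  # (10, 10)
off_lrx, off_lry = map(int, gdal.ApplyGeoTransform(inv_gt, lrx, lry))  # (50, 50)
rows = off_lry - off_uly   # 40 rows to extract
cols = off_lrx - off_ulx   # 40 columns to extract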
Unique classes are stored as self.unique_classes. Total number of classes is stored as self.total_classes_number. Total number of each class is stored in self.class_number_dict. Total number of unique words is stored as self.unique_word_number. Total number of a word in a class is stored in self.class_word_number_dict. Total number of words in a class is stored in self.class_total_words_dict. | def __init__(self):
self.unique_classes = []
self.total_classes_number = 0
self.class_number_dict = {}
self.unique_word_number = 0
self.class_word_number_dict = {}
self.class_total_words_dict = {} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def classes(self):\n if not hasattr(self, '_unique_classes'):\n # build when we don't have\n self._unique_classes = self.data['label'].unique()\n self._unique_classes.sort()\n\n ret = self._unique_classes\n return ret",
"def calc_class_weights(self):\n y = self.train_eval_data[\"sentiment\"]\n self.class_weights = {}\n classes = np.unique(y)\n for cls in classes:\n self.class_weights[cls] = len(y) / (len(classes) * (y == cls).sum())",
"def finalize_class_set(self) -> None:\n logger.info(\"We have {} distinct classes, let's cluster it!\", len(self.classes))\n\n logger.debug(\"Created a cluster instance {} and this will cluster {} samples\", self.cluster, self.classes)\n try:\n assigned_clusters = self.cluster.cluster(vectors=[self.convert_str_list_to_vector(c) for c in self.classes],\n assign_clusters=True, trace=not execute_on_ssh_compute)\n except Exception:\n logger.exception(\"Failed to cluster the actual class set ({} samples)\", len(self.classes))\n return\n\n self.classes_to_one_hot_encode_dict.clear()\n for i in range(len(self.classes)):\n self.classes_to_one_hot_encode_dict[self.classes[i]] = assigned_clusters[i]",
"def summarize_classes(classes):\n u, indices = np.unique(classes,return_inverse=True)\n num_u=len(u)\n print(\"****************************\")\n print(\"Number of samples: {0}\".format(len(classes)))\n print(\"Number of Classes:{0}\".format(num_u))\n for c in u:\n num_c=np.sum(classes==c)\n print(\"Class {0}: {1} Samples\".format(c,num_c))\n print(\"****************************\")",
"def get_num_classes(self):",
"def assignClass(self):\n classes = {}\n classes['en'] = 0\n classes['nl'] = 0\n assignedClass = \"\"\n\n for record in self.data:\n if record[-1] == 'en':\n classes['en'] += 1\n elif record[-1] == 'nl':\n classes['nl'] += 1\n\n max = 0\n for key in classes.keys():\n # get max class\n if max < classes[key]:\n max = classes[key]\n assignedClass = key\n\n self.enClass = classes['en']\n self.nlClass = classes['nl']\n\n return assignedClass",
"def classes(self) -> List[Any]:\n return list(self.label_counts.keys())",
"def num_class(self):\r\n return self._num_class",
"def __init__(self):\n\n # List of all the class labels\n self.labels = [0, 1, 2, 3]\n\n # Dictionary to store count of each label in predicted labels list\n self.total_prediction_count = {0: 0, 1: 0, 2: 0, 3: 0}\n\n # Dictionary to store count of each label in actual labels list\n self.total_actual_count = {0: 0, 1: 0, 2: 0, 3: 0}\n\n # Dictionary to store count of correctly predicted labels\n self.total_correct_prediction_count = {0: 0, 1: 0, 2: 0, 3: 0}",
"def classesAndFrames(self):\n classes = defaultdict(int)\n with open(self.inputfile) as fin:\n for line in fin:\n arr = line.strip().split()\n y = int(arr[1])\n classes[y] += 1\n return classes",
"def num_classes(self):\n\t\t\treturn len(self.classes)",
"def num_classes(self):\n\t\treturn len(self.classes)",
"def make_classdict(self):\n\t\ttarget_names = self.fetched[\"train\"].target_names\n\t\tself.classdict = {target_names[idx]: idx for idx in range(len(target_names))}",
"def class_conditional_word_dist(self, Mprint=20):\n self.class_word_dist = np.array(np.vstack([self.data[self.labels == ci, :].sum(0)/self.data[self.labels == ci, :].sum() for ci in np.unique(self.labels)])) # num of classes x num of words\n self.labels_word = self.class_word_dist.argmax(0)\n for i in range(self.class_word_dist.shape[0]):\n print('top {} frequent words in class {}'.format(Mprint, i))\n idx = np.argsort(self.class_word_dist[i, :])[::-1][:Mprint]\n for j in range(Mprint):\n print(' {:3d}: {:10s} {:.4f}'.format(j, self.vocab[idx[j]], self.class_word_dist[i, idx[j]]))",
"def num_classes(self):\n return len(self.classes)",
"def __init__(self, classes, data_size):\r\n self.classes = classes\r\n self.data_size = data_size\r\n self.conditional_prob = {class_:{} for class_ in classes} # Conditional Probability Table for storing parameters useful to compute P(feat|class_)\r\n self.class_prob = {} # Stores the priors\r",
"def majority_class (self, classData):\n\n\t\t###### your implementation below ######\n\t\ttempSet = {}\n\t\thighest = 0\n#\t\tmajority = \"\"\n\n\t\tfor i in range(len(classData)):\n\t\t\ttempSet[classData[i]] = (classData.count(classData[i]))\n\n\t\tfor i in tempSet:\n\t\t\tif (tempSet[i] >= highest):\n\t\t\t\thighest = tempSet[i]\n\t\t\t\t\n\t\treturn highest",
"def test_class_counts(self):\n oz = ClassificationScoreVisualizer(GaussianNB())\n oz.fit(self.multiclass.X.train, self.multiclass.y.train)\n\n unique, counts = np.unique(self.multiclass.y.train, return_counts=True)\n npt.assert_array_equal(oz.classes_, unique)\n npt.assert_array_equal(oz.class_counts_, counts)",
"def num_classes(self):\n raise NotImplementedError",
"def num_classes(self):\n\t\treturn 10",
"def get_class_labels(self):\r\n \r\n y = self.get_data()['y']\r\n if type(y) == torch.Tensor:\r\n return y.unique().numpy()\r\n else:\r\n return sorted(list(set(y)))",
"def get_number_of_classes(self):\n return len(self.class_dict.keys())",
"def num_classes(self):\n return self._num_classes",
"def __init__(self):\n\t\tself.word_count_dict = {}\n\t\tself.num_comments = 0\n\t\tself.num_words = 0",
"def num_classes():\n return NUM_CLASSES",
"def classif(self, text):\r\n content = self.prizn.tokenize(text)\r\n filec = self.vectorize_content(content)\r\n selected = {}\r\n for klas in self.prizn.klas_tridy:\r\n distance = 0.0\r\n wrdc = 0.0\r\n for wrd in filec:\r\n if wrd in self.prizn.klas_tridy[klas]:\r\n wrdc += 1.0\r\n distance += abs(float(filec[wrd]) - float(self.prizn.klas_tridy[klas][wrd]))\r\n if wrdc > 0:\r\n selected[klas] = float(distance) / float(wrdc)\r\n\r\n max_class = \"\"\r\n for i in range(0, 3):\r\n klas = max(selected, key=lambda k: selected[k])\r\n max_class = max_class + \" ,\" + klas\r\n del selected[klas]\r\n\r\n return max_class",
"def view_counts():\n out = {}\n for i in range(len(classes)):\n out.update({decoded[i]: storage.count(classes[i])})\n return out",
"def __init__ (self):\n self.lengths = {}\n self.lower_counts = {}\n self.upper_counts = {}\n self.digit_counts = {}\n self.symbol_counts = {}\n self.class_counts = {}\n self.word_counts = {}",
"def n_classes(self):\n raise NotImplementedError",
"def n_classes(self):\n raise NotImplementedError"
] | [
"0.7139616",
"0.6484298",
"0.645103",
"0.6282322",
"0.6265374",
"0.62360275",
"0.6218017",
"0.61864156",
"0.6164485",
"0.6155118",
"0.6132282",
"0.6087209",
"0.6073724",
"0.6055184",
"0.6008472",
"0.59811527",
"0.59720916",
"0.59651655",
"0.5958494",
"0.59583336",
"0.59582025",
"0.5952681",
"0.59431595",
"0.594036",
"0.59381676",
"0.5920385",
"0.59068537",
"0.58975023",
"0.5871992",
"0.5871992"
] | 0.85412234 | 0 |
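The routine that fills these counters is not shown here; the sketch below is one plausible way a naive-Bayes-style trainer would populate them, using a made-up two-document corpus:

from collections import defaultdict

docs = [("spam", ["buy", "now"]), ("ham", ["see", "you", "now"])]  # toy corpus

class_number_dict = defaultdict(int)        # documents seen per class
class_word_number_dict = defaultdict(int)   # (class, word) -> occurrence count
class_total_words_dict = defaultdict(int)   # total words seen per class
vocabulary = set()

for label, words in docs:
    class_number_dict[label] += 1
    for word in words:
        class_word_number_dict[(label, word)] += 1
        class_total_words_dict[label] += 1
        vocabulary.add(word)

unique_classes = sorted(class_number_dict)  # ['ham', 'spam']
unique_word_number = len(vocabulary)        # 4 distinct words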
Updates a testing scenario. | def put(self, id):
data = request.json
update_scenario(id, data)
return None, 204 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_update_scenario(self):\n pass",
"def test_update(self):\n payload = {\n 'name': 'Pecho inclinado',\n 'description': \"New description\",\n 'muscle_group': \"pecho\"\n }\n response = self.client.put(\n '/exercises/{}/'.format(self.exer1.id), data=payload)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n Exercise.objects.get(id=self.exer1.id).name, payload['name'])",
"def test_update_case(self):\n pass",
"def put(self, request):\n\n data = request.data\n test_id = data['test_id']\n data.pop(\"test_id\")\n test_data = UserTestHistory.objects.filter(id=test_id)\n\n try:\n test_data.update(**data)\n LOGGER.info(\"Test data updated successfully\")\n return Response({\"status\": \"SUCCESS\", \"message\": \"Record updated successfully\"})\n except Exception, error:\n LOGGER.error(\"Error:%s\", str(error))\n return Response({\"status\": \"FAILED\", \"message\": str(error)})",
"def test_update(self):\n obj = self.provision_single_asset()\n test_string = \"testing this thing\"\n p = {'id': obj.id, 'description': test_string}\n self.put('widget', 200, params=p)\n self.session.refresh(obj)\n assert obj.description == test_string",
"def test_update(self):\n payload = {\n 'id': self.rout1.id,\n 'name': 'Tuesday routine',\n 'exercises': [self.exer1.id]\n }\n response = self.client.put(\n '/routines/{}/'.format(self.rout1.id), data=payload)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n Routine.objects.get(id=self.rout1.id).name, payload['name'])",
"def update(self, expectation_suite: es.ExpectationSuite) -> es.ExpectationSuite:\n _client = client.get_instance()\n path_params = [\n \"project\",\n _client._project_id,\n \"featurestores\",\n self._feature_store_id,\n \"featuregroups\",\n self._feature_group_id,\n \"expectationsuite\",\n expectation_suite.id,\n ]\n\n headers = {\"content-type\": \"application/json\"}\n payload = expectation_suite.json()\n\n major, minor = self._variable_api.parse_major_and_minor(\n self._variable_api.get_version(\"hopsworks\")\n )\n method = \"PUT\"\n if major == \"3\" and minor == \"0\":\n method = \"POST\"\n del path_params[-1]\n\n return es.ExpectationSuite.from_response_json(\n _client._send_request(method, path_params, headers=headers, data=payload)\n )",
"def test_update(self):\n pass",
"def test_update(self):\n pass",
"def test_update(self):\n pass",
"def test_update(self):\n # this is tested graphically, as it is UI\n pass",
"def scenario(self, scenario):\n self.report_scenario_completed()\n self.current_scenario = scenario",
"def test_update_goal(self):\n pass",
"def test_update(self, client, stage, agent_token):\n stage_url = stage_url_for(stage)\n response = client.put(\n stage_url,\n headers={'x_dockci_api_key': agent_token},\n data={'success': 'false'},\n )\n\n assert response.status_code == 200\n\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == False\n\n response = client.get(stage_url)\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == False",
"def test_update_activity(self):\n pass",
"def update(self, **payload):\n update_story_url =\"https://www.pivotaltracker.com/services/v5/projects/{}/stories/{}\".format(self.project_id, self.story_id)\n return _perform_pivotal_put(update_story_url, payload)",
"def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))",
"def test_update_sample(self):\n response = self.client.post(reverse('update-proband', args=[self.gel_ir.id]),\n {'outcome': 'testoutcome',\n 'comment': 'testcomment',\n 'case_status': 'N',\n 'pilot_case': True,\n 'mdt_status': 'R',\n 'case_sent': False,\n 'no_primary_findings': False},\n follow=True)\n self.assertContains(response, 'Proband Updated')\n self.assertEquals(response.status_code, 200)\n proband = Proband.objects.get(id=self.proband.id)\n gelir = GELInterpretationReport.objects.get(id=self.gel_ir.id)\n self.assertEqual(proband.comment, 'testcomment')\n self.assertEqual(gelir.pilot_case, True)",
"def test_update_study(self):\n study_spec = sample_study_spec()\n study_id = self.storage.create_study(study_spec)\n self.assertEqual(study_pb2.StudySpec.STATE_ENABLED, study_spec.state)\n self.assertEqual('test', study_spec.name)\n creation_time = study_spec.creation_time.ToDatetime()\n\n study_spec.name = 'changed test'\n study_spec.state = study_pb2.StudySpec.STATE_DISABLED\n study_spec.creation_time.GetCurrentTime()\n self.storage.update_study(study_spec)\n\n study_spec = self.storage.get_study(study_id)\n self.assertIsNotNone(study_spec)\n assert study_spec # To disable attribute-error\n self.assertEqual('changed test', study_spec.name)\n # Creation time and status should not change.\n self.assertEqual(study_pb2.StudySpec.STATE_ENABLED, study_spec.state)\n self.assertEqual(creation_time, study_spec.creation_time.ToDatetime())",
"def test_update(self, init_db, audit):\n params = {\n \"resource_type\": \"Category\",\n \"action\": \"Updated\",\n \"activity\": \"changed name\"\n }\n audit.update(**params)\n assert audit.resource_type == params['resource_type']\n assert audit.action == params['action']\n assert audit.activity == params['activity']",
"def test_update_goal(self, segment_call):\n self.post_course_goal(valid=True, goal_key='explore')\n self.post_course_goal(valid=True, goal_key='certify')\n self.post_course_goal(valid=True, goal_key='unsure')\n\n segment_call.assert_any_call(self.user.id, EVENT_NAME_ADDED, {\n 'courserun_key': str(self.course.id), 'goal_key': 'explore',\n 'days_per_week': 0,\n 'subscribed_to_reminders': False,\n })\n segment_call.assert_any_call(self.user.id, EVENT_NAME_UPDATED, {\n 'courserun_key': str(self.course.id), 'goal_key': 'certify',\n 'days_per_week': 0,\n 'subscribed_to_reminders': False,\n })\n segment_call.assert_any_call(self.user.id, EVENT_NAME_UPDATED, {\n 'courserun_key': str(self.course.id), 'goal_key': 'unsure',\n 'days_per_week': 0,\n 'subscribed_to_reminders': False,\n })\n current_goals = CourseGoal.objects.filter(user=self.user, course_key=self.course.id)\n assert len(current_goals) == 1\n assert current_goals[0].goal_key == 'unsure'",
"def update_story():\n client = RequestManager()\n client.set_method(\"PUT\")\n client.set_endpoint(\"/projects/{0}/stories/{1}\".format(STORED_ID['project_id'], STORED_ID['story_id']))\n groups_list = [\"scheduled\", \"unscheduled\", \"current\"]\n current_state_list = [\"accepted\", \"delivered\", \"finished\", \"started\",\n \"rejected\", \"planned\", \"unstarted\", \"unscheduled\"]\n body = {\"group\": choices(groups_list), \"current_state\": choices(current_state_list)}\n client.set_body(json.dumps(body))\n client.execute_request()",
"def put(self, request):\n\n data = request.data\n test_type_id = data['test_type_id']\n data.pop(\"test_type_id\")\n test_type = TestType.objects.filter(id=test_type_id)\n\n try:\n test_type.update(**data)\n LOGGER.info(\"Test type data updated successfully\")\n return Response({\"status\": \"SUCCESS\", \"message\": \"Record updated successfully\"})\n except Exception, error:\n LOGGER.error(\"Error:%s\", str(error))\n return Response({\"status\": \"FAILED\", \"message\": str(error)})",
"def test_update_workout(self):\n body = Workout()\n response = self.client.open(\n '/workout/{id}'.format(id='id_example'),\n method='PUT',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"def test_update_one(self):\n pass",
"def update(self, dbtestcase):\n for key, value in self._data.items():\n setattr(dbtestcase, key, value)\n for key, value in self._many2many.items():\n setattr(dbtestcase, key, value)\n _dbsession.commit()",
"def test_update_category(self):\n category = sample_category()\n url = category_details_url(category.id)\n self.client.put(url, {\"name\": \"school\"})\n category.refresh_from_db()\n self.assertEqual(category.name, 'school')",
"def test_update_state1(self):\n pass",
"def test_update_inventory(self):\n pass",
"def test_update_goal_metric(self):\n pass"
] | [
"0.71119636",
"0.63907033",
"0.6312588",
"0.6248452",
"0.6179395",
"0.61243194",
"0.610072",
"0.60918045",
"0.60918045",
"0.60918045",
"0.60252446",
"0.5980113",
"0.59608054",
"0.5916117",
"0.5899539",
"0.5895658",
"0.58752656",
"0.5818258",
"0.5801083",
"0.57950765",
"0.579374",
"0.5792379",
"0.5772244",
"0.5753141",
"0.57508904",
"0.57438534",
"0.5602",
"0.5588147",
"0.5587667",
"0.556415"
] | 0.66489846 | 1 |
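A client-side sketch of exercising the PUT handler above; the mount point /scenarios/<id> and the payload fields are assumptions, not taken from the source:

import requests

resp = requests.put(
    "http://localhost:5000/scenarios/42",  # hypothetical route for the resource
    json={"name": "updated scenario"},     # hypothetical payload for update_scenario
)
assert resp.status_code == 204             # the handler returns no body on success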
Mix an image by a constant base color. The base color should be a 1-by-3 array-like object representing an RGB color in [0, 255]^3 space. For example, to mix with orange, the transformation RGBTransform().mix_with((255, 127, 0)) might be used. The factor controls the strength of the color to be added. If the factor is 1.0, all pixels will be exactly the new color; if it is 0.0, the pixels will be unchanged. | def mix_with(self, base_color, factor=1.0):
base_color = _to_rgb(base_color, "base_color")
operation = _embed44((1 - factor) * np.eye(3))
operation[:3, 3] = factor * base_color
return self._then(operation) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mix(\n self,\n color: ColorInput,\n percent: float = util.DEF_MIX,\n *,\n in_place: bool = False,\n **interpolate_args: Any\n ) -> 'Color':\n\n # Mix really needs to be between 0 and 1 or steps will break\n domain = interpolate_args.get('domain')\n if domain is not None:\n interpolate_args['domain'] = interpolate.normalize_domain(domain)\n\n if not self._is_color(color) and not isinstance(color, (str, Mapping)):\n raise TypeError(\"Unexpected type '{}'\".format(type(color)))\n mixed = self.interpolate([self, color], **interpolate_args)(percent)\n return self._hotswap(mixed) if in_place else mixed",
"def _color(self, x, factor):\r\n factor = (factor/MAX_LEVEL) * 1.8 + .1\r\n degenerate = tf.image.grayscale_to_rgb(tf.image.rgb_to_grayscale(x))\r\n x = tfa.image.blend(degenerate, tf.cast(x, tf.float32), factor)\r\n return tf.saturate_cast(x, tf.uint8)",
"def mix(self, other, coef=0.5):\n def m(a, b):\n return a * (1 - coef) + b * coef\n\n return Color(from_rgba=(c(m(self.r, other.r)),\n c(m(self.g, other.g)),\n c(m(self.b, other.b)),\n c(m(self.a, other.a))))",
"def mixfactor(self, segment):\n mixfactor = 0\n a = (89.0/1.5) + self.template['mixpoint']\n b = (188.0/1.5) + self.template['mixpoint']\n loud = self.loudness(self.original.analysis.segments, segment)\n if not loud:\n loud = self.original.analysis.loudness\n if loud != -1 * b:\n mixfactor = float(float(loud + a)/float(loud + b))\n if mixfactor > 0.8:\n mixfactor = 0.8\n elif mixfactor < 0.3:\n mixfactor = 0.3\n return mixfactor",
"def multiply_color(clip, factor):\n return clip.image_transform(\n lambda frame: np.minimum(255, (factor * frame)).astype(\"uint8\")\n )",
"def color(image, factor):\n image = tf.convert_to_tensor(image)\n dtype = image.dtype\n if dtype not in [tf.uint8, tf.int32, tf.uint16, tf.int64, tf.uint32]:\n image = tf.cast(image * 255.0, tf.uint8)\n degenerate = tf.image.grayscale_to_rgb(tf.image.rgb_to_grayscale(image))\n degenerate = blend(degenerate, image, factor)\n if dtype not in [tf.uint8, tf.int32, tf.uint16, tf.int64, tf.uint32]:\n degenerate = tf.cast(degenerate, tf.float32) / 255\n return degenerate",
"def blend(image1, image2, factor):\n assert 0.0 <= factor <= 1.0\n image1 = tf.convert_to_tensor(image1)\n image2 = tf.convert_to_tensor(image2)\n dtype = image1.dtype\n if factor == 0.0:\n return image1\n if factor == 1.0:\n return image2\n\n image1 = tf.cast(image1, tf.float32)\n image2 = tf.cast(image2, tf.float32)\n assert image1.shape == image2.shape\n difference = image2 - image1\n scaled = factor * difference\n temp = image1 + scaled\n flip = 255 if dtype == tf.uint8 else 1.0\n temp = tf.clip_by_value(temp, 0.0, flip)\n return tf.cast(temp, dtype)",
"def driftColor(baseColor, factor=110):\n if baseColor.lightness() > 128:\n return baseColor.darker(factor)\n else:\n return baseColor.lighter(factor+10)",
"def blend(image1, image2, factor, name=None):\n _check_image_dtype(image1)\n _check_image_dtype(image2)\n assert image1.dtype == image2.dtype, \"image1 type should exactly match type of image2\"\n\n if factor == 0.0:\n return image1\n elif factor == 1.0:\n return image2\n else:\n with tf.name_scope(name or \"blend\"):\n orig_dtype = image2.dtype\n\n image1, image2 = tf.image.convert_image_dtype(image1, tf.float32), tf.image.convert_image_dtype(image2, tf.float32)\n scaled_diff = (image2 - image1) * factor\n\n blended_image = image1 + scaled_diff\n\n blended_image = tf.image.convert_image_dtype(blended_image, orig_dtype, saturate=True)\n return blended_image",
"def adjust_contrast(image, factor):\r\n mean = image.mean(axis=0).mean(axis=0)\r\n return _clip((image - mean) * factor + mean)",
"def brightness(self, factor):\n\n channels = [\"r\", \"g\", \"b\"]\n total_lumes = clamp(self.get_luminance() + (255.0 * factor) - 255.0, 0.0, 255.0)\n\n if total_lumes == 255.0:\n # white\n self.r, self.g, self.b = 0xFF, 0xFF, 0xFF\n elif total_lumes == 0.0:\n # black\n self.r, self.g, self.b = 0x00, 0x00, 0x00\n else:\n # Adjust Brightness\n pts = (total_lumes - 0.299 * self.r - 0.587 * self.g - 0.114 * self.b)\n slots = set(channels)\n components = [float(self.r) + pts, float(self.g) + pts, float(self.b) + pts]\n count = 0\n for c in channels:\n overage, components[count] = self._get_overage(components[count])\n if overage:\n slots.remove(c)\n components = list(self._distribute_overage(components, overage, slots))\n count += 1\n\n self.r = clamp(round_int(components[0]), 0, 255) & 0xFF\n self.g = clamp(round_int(components[1]), 0, 255) & 0xFF\n self.b = clamp(round_int(components[2]), 0, 255) & 0xFF",
"def normalize_image(self, factor, luminosity=None):\n if not luminosity:\n luminosity = self.average_luminosity()\n\n for i in range(len(self.pixels)):\n self.pixels[i] = self.pixels[i] * (factor / luminosity)",
"def mix(src_color, src_f, dst_color, dst_f):\n src_a = src_color[:, 3] / 255\n dst_a = dst_color[:, 3] / 255\n out_a = src_a * src_f + dst_a * dst_f\n outafilter = out_a > 0\n out_rgb = np.zeros((src_color.shape[0], 3), dtype='u1')\n out_rgb[outafilter] = np.clip(np.round((src_color[outafilter, 0:3] * np.tile(src_a[outafilter].reshape(-1, 1), (1, 3)) * np.tile(src_f[outafilter].reshape(-1, 1), (1, 3)) + dst_color[outafilter, 0:3] * np.tile(dst_a[outafilter].reshape(-1, 1), (1, 3)) * np.tile(dst_f[outafilter].reshape(-1, 1), (1, 3))) / np.tile(out_a[outafilter].reshape(-1, 1), (1, 3))), 0, 255)\n return np.concatenate([out_rgb, np.clip(np.round(out_a * 255), 0, 255).reshape(-1, 1)], axis=1).astype('u1').copy()",
"def downsample(self, factor):\n self.img = self.img[::factor, ::factor, :] if self.fast else self.img\n self.comb_structure_mask = self.comb_structure_mask[::factor, ::factor]\n self.unknown_mask = self.unknown_mask[::factor, ::factor]",
"def monochrome(image_path, factor=0):\n BasicTransform.saturate(image_path, factor)",
"def mix_colors(color1: Color, color2: Color, mix_amount: float) -> Color:\n return [(1-mix_amount)*v1 + mix_amount*v2 for v1, v2 in zip(color1, color2)]",
"def scale(self, factor):\n self.b = factor * self.b",
"def adjust_saturation(image, factor):\r\n image[..., 1] = np.clip(image[..., 1] * factor, 0, 255)\r\n return image",
"def blend(c: float, a: float) -> float:\n return 255 + (c - 255) * a",
"def saturation(self, factor):\n\n h, l, s = self.tohls()\n s = clamp(s + factor - 1.0, 0.0, 1.0)\n self.fromhls(h, l, s)",
"def adjust_brightness(img, brightness_factor):\n _assert_image_tensor(img, 'CHW')\n assert brightness_factor >= 0, \"brightness_factor should be non-negative.\"\n assert _get_image_num_channels(img, 'CHW') in [\n 1,\n 3,\n ], \"channels of input should be either 1 or 3.\"\n\n extreme_target = paddle.zeros_like(img, img.dtype)\n return _blend_images(img, extreme_target, brightness_factor)",
"def Contrast(img):\r\n factor = 2 * (np.random.rand() - 0.5) * 128\r\n assert (factor <= 128 and factor >= -128), 'contract factor value wrong'\r\n fvalue = 259.0/255.0 * (factor + 255.0)/(259.0-factor)\r\n img = np.round((img - 128.0)*fvalue + 128.0)\r\n img = np.where(img > 255, 255, img)\r\n img = np.where(img < 0, 0, img)\r\n img = np.uint8(img)\r\n return img",
"def mix(a, b, amount):\n return ((1.0 - amount) * a) + (amount * b)",
"def saturate(image_path, factor=4):\n BasicTransform.convert_image(image_path)\n\n with Image.open(image_path) as img:\n filter = ImageEnhance.Color(img)\n new_image = filter.enhance(factor)\n new_image.save(image_path)",
"def __init__(self, incoming, factor, name='RGBtoGrayLayer'):\n super(MultiplyFactorLayer, self).__init__()\n with tf.variable_scope(name) as self.layer_scope:\n self.incoming, self.incoming_shape = get_input(incoming)\n self.output_shape = self.incoming_shape\n \n self.out = None\n self.name = name\n self.factor = factor",
"def mul(self, factor):\n if factor == 1.:\n return\n new_channel = sppasChannel()\n new_channel.set_sampwidth(self._sampwidth)\n new_channel.set_framerate(self._framerate)\n a = sppasAudioFrames(self._channel.get_frames(self._channel.get_nframes()), self._channel.get_sampwidth(), 1)\n new_channel.set_frames(a.mul(factor))\n\n self._channel = new_channel",
"def brighten(rgb, factor):\n return [min(255, int(round(factor * c))) for c in rgb]",
"def __itruediv__(self, factor):\n self.components = [c / factor for c in self.components]\n return self",
"def brighten(image_path, factor=1.5):\n BasicTransform.darken(image_path, factor)",
"def factor(self, key, factor):\n self.dist[key] *= factor\n self.normalize()"
] | [
"0.63442767",
"0.5894581",
"0.5830767",
"0.5649231",
"0.5571899",
"0.5529526",
"0.5451907",
"0.536669",
"0.53180206",
"0.5242517",
"0.51597506",
"0.5142259",
"0.5101442",
"0.5004268",
"0.49906832",
"0.49472693",
"0.48828548",
"0.4848855",
"0.48302355",
"0.4824658",
"0.48208082",
"0.48051783",
"0.47939765",
"0.47575587",
"0.4741046",
"0.4691326",
"0.46719196",
"0.46538633",
"0.46522072",
"0.46440622"
] | 0.80262935 | 0 |
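A sketch of the affine blend that mix_with builds, using the same homogeneous-coordinate convention as _embed44 below (the values are illustrative):

import numpy as np

factor, base = 0.5, np.array([255.0, 127.0, 0.0])  # half-strength orange
op = np.eye(4)
op[:3, :3] *= 1 - factor       # scale the existing color down
op[:3, 3] = factor * base      # shift toward the base color

pixel = np.array([0.0, 0.0, 0.0, 1.0])  # black, in homogeneous RGB coordinates
mixed = op @ pixel                       # -> [127.5, 63.5, 0.0, 1.0]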
Apply this transformation to a copy of the given RGB image. The image should be a PIL image with at least three channels. Specifically, the RGB and RGBA modes are both supported, but L is not. Any channels past the first three will pass through unchanged. The original image will not be modified; a new image of the same mode and dimensions will be returned. | def applied_to(self, image):
# PIL.Image.convert wants the matrix as a flattened 12-tuple.
# (The docs claim that they want a 16-tuple, but this is wrong;
# cf. _imaging.c:767 in the PIL 1.1.7 source.)
matrix = tuple(self.get_matrix().flatten())
channel_names = image.getbands()
channel_count = len(channel_names)
if channel_count < 3:
raise ValueError("Image must have at least three channels!")
elif channel_count == 3:
return image.convert('RGB', matrix)
else:
# Probably an RGBA image.
# Operate on the first three channels (assuming RGB),
# and tack any others back on at the end.
channels = list(image.split())
rgb = PIL.Image.merge('RGB', channels[:3])
transformed = rgb.convert('RGB', matrix)
new_channels = transformed.split()
channels[:3] = new_channels
return PIL.Image.merge(''.join(channel_names), channels) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __enhance_image(self, img):\n\n blue = self.g.clahe.apply(img[:,:,0])\n green = self.g.clahe.apply(img[:,:,1])\n red = self.g.clahe.apply(img[:,:,2])\n img[:,:,0] = blue\n img[:,:,1] = green\n img[:,:,2] = red\n return img",
"def colorize(image, newColor):\n image = image.copy()\n\n # zero out RGB values\n image.fill((0, 0, 0, 255), None, pg.BLEND_RGBA_MULT)\n # add in new RGB values\n image.fill(newColor[0:3] + (0,), None, pg.BLEND_RGBA_ADD)\n\n return image",
"def colorize(image, newColor):\n image = image.copy()\n\n # zero out RGB values\n image.fill((0, 0, 0, 255), None, pygame.BLEND_RGBA_MULT)\n # add in new RGB values\n image.fill(newColor[0:3] + [0,], None, pygame.BLEND_RGBA_ADD)\n\n return image",
"def __call__(self, img):\r\n channels = self.channels\r\n img_copy = np.zeros([channels, img.shape[0], img.shape[1]])\r\n\r\n for i in range(channels):\r\n img_copy[i, :, :] = np.reshape(img, [1, img.shape[0], img.shape[1]]).copy()\r\n\r\n if not isinstance(img_copy, np.ndarray) and (img_copy.ndim in {2, 3}):\r\n raise TypeError('img should be ndarray. Got {}'.format(type(img_copy)))\r\n\r\n if isinstance(img_copy, np.ndarray):\r\n # handle numpy array\r\n img_copy = torch.from_numpy(img_copy)\r\n # backward compatibility\r\n return img_copy.float()",
"def copy_image(img: Image) -> Image:\n width, height = img.size\n new_img = Image.new(img.mode, img.size)\n new_pixels = new_img.load() # New Image pixels, default: all black.\n pixels = img.load() # Input Image pixels.\n for x in range(width):\n for y in range(height):\n new_pixels[x,y] = pixels[x,y]\n return new_img",
"def rgb_image(self):\n z3 = self.z[:,:,newaxis]\n return z3 * self.c",
"def reduce_color(image):\n\n # http://stackoverflow.com/questions/5906693/how-to-reduce-the-number-of-colors-in-an-image-with-opencv-in-python\n w, h, _ = image.shape\n for row in xrange(h-1):\n for col in xrange(w-1):\n #pi = row * w * 3 + col * 3\n pixel = image[col][row]\n pixel[0] = __reduceColorValue(pixel[0])\n pixel[1] = __reduceColorValue(pixel[1])\n pixel[2] = __reduceColorValue(pixel[2])\n return image",
"def img_to_rgb(img):\r\n if len(img.shape) < 3 or img.shape[2] == 1:\r\n return np.repeat(img, 3).reshape(img.shape[0], img.shape[1], 3)\r\n else:\r\n return img",
"def rgb_processing(rgb_img, center, scale, rot=0):\n rgb_img = crop(rgb_img, center, scale, \n [constants.IMG_RES, constants.IMG_RES], rot=rot)\n # (3,224,224),float,[0,1]\n rgb_img = np.transpose(rgb_img.astype('float32'),(2,0,1))/255.0\n return rgb_img",
"def red_filter(img):\r\n #with Image.open(filename) as img:\r\n w = img.width\r\n h = img.height\r\n\r\n newimg = Image.new('RGB', (w,h))\r\n for y in range(h):\r\n for x in range(w):\r\n r, g, b = img.getpixel((x,y))\r\n \r\n newimg.putpixel((x, y), (r, 0, 0))\r\n \r\n return newimg",
"def to_color(self):\n if self.channels == 4:\n color = opencv.cvtColor(self.img, opencv.COLOR_BGRA2BGR)\n return Image(color)\n elif self.channels == 1:\n color = opencv.cvtColor(self.img, opencv.COLOR_GRAY2BGR)\n return Image(color)\n else:\n return Image(self.img)",
"def process_image(image):\n # Open the image using PIL\n pil_image = Image.open(image)\n \n # Resize the image to 256x256 while maintining aspect ratio\n if pil_image.width > pil_image.height:\n resize_dim = (int(pil_image.width*256 / pil_image.height), 256)\n else:\n resize_dim = (256, int(pil_image.height*256 / pil_image.width))\n \n pil_image = pil_image.resize(resize_dim)\n \n # Crop image to center 224 pixles\n crop_box_dim = 224\n left = (pil_image.width - crop_box_dim)/2\n top = (pil_image.height - crop_box_dim)/2\n right = pil_image.width - (pil_image.width - crop_box_dim)/2\n bottom = pil_image.height - (pil_image.height - crop_box_dim)/2\n pil_image = pil_image.crop((left, top, right, bottom))\n \n # Update color channels\n np_image = np.array(pil_image)\n np_image_means = np.array([0.485, 0.456, 0.406])\n np_image_stddev = np.array([0.229, 0.224, 0.225])\n np_image = (np_image/255 - np_image_means) / np_image_stddev\n \n # PIL images and numpy arrays have color channels in the 3rd dimension\n # Transpose them to first dimension to match what PyTorch expects\n np_image = np_image.transpose((2,0,1))\n\n return np_image",
"def grey_to_rgb(im):\n assert im.n_channels in [1, 3]\n\n if im.n_channels == 3:\n return im\n\n im.pixels = np.vstack([im.pixels] * 3)\n return im",
"def grayscale(image: Image) -> Image:\r\n new_image = copy(image)\r\n for x,y,(r,g,b) in image:\r\n pix_bright = (r+g+b)//3\r\n Gray = create_color(pix_bright,pix_bright,pix_bright)\r\n set_color(new_image,x,y,Gray) \r\n return new_image",
"def transform_image(self):\n im = cv2.imread(\"result.png\", 0)\n im2 = cv2.resize(im, (28, 28))\n im = im2.reshape(28, 28, -1)\n im = im.reshape(1, 1, 28, 28)\n im = cv2.bitwise_not(im)\n im = im.reshape(28,28)\n \n with out:\n clear_output()\n \n # resize\n img = np.array(im)\n img = img.reshape(28*28,)\n \n #img = img/255.0\n \n return img",
"def _preprocess(self, image):\n\n # Scale from [0, 255] to [0, 1] and BGR to RGB \n return (image / 255.0)[:, :, ::-1]",
"def reconstructImage(self,arr):\n\t\tarr = arr * 256\n\t\tarr = np.array(np.round(arr),dtype=np.uint8)\n\t\t#arr = np.array(arr,dtype=np.uint8)\n\n\t\t# We need to transpose the array because we flatten X by columns\n\t\t#arr = arr.T\n\t\t#a = arr.reshape((self.width, self.height,3))\n\t\t\n\t\tif self.mode == 'L':\n\t\t\ta = arr.reshape((self.width, self.height))\n\t\telse:\n\t\t\ta = arr.reshape((self.width, self.height,3))\n\n\t\t#a = arr.reshape((3,self.width, self.height))\t\t\n\t\t#a = arr.transpose(0, 3, 1, 2)\n\n\t\tim = Image.fromarray(a,mode=self.mode)\n\n\t\treturn im",
"def to_pillow(self) -> PILImage:\n return PILImage.fromarray(self.rgb().to_numpy())",
"def convert_img(self):\r\n self.img = self.img.convert('RGB')",
"def lab_to_rgb(image: tf.Tensor) -> tf.Tensor:\n xyz = lab_to_xyz(image)\n rgb_image = xyz_to_rgb(xyz)\n return rgb_image",
"def preprocess_image(self, img):\r\n\r\n # if channel 1 then as grayscale\r\n try:\r\n if img.shape[1] / img.shape[0] < 6.4:\r\n img = pad_image(img, (self.crnn_cfg().width, self.crnn_cfg().height), self.crnn_cfg().nb_channels)\r\n else:\r\n img = resize_image(img, (self.crnn_cfg().width, self.crnn_cfg().height))\r\n if self.crnn_cfg().nb_channels == 1:\r\n img = img.transpose([1, 0])\r\n else:\r\n img = img.transpose([1, 0, 2])\r\n\r\n img = np.flip(img, 1)\r\n img = img / 255.0\r\n if self.crnn_cfg().nb_channels == 1:\r\n img = img[:, :, np.newaxis]\r\n return img\r\n except:\r\n print('Error in method {0} in module {1}'.format('preprocess_image', 'crnn_bridge.py'))\r\n return None",
"def __call__(\n self,\n image: np.ndarray,\n f_keep_pixels: float = 0,\n f_keep_colored_pixels: float = 0,\n ) -> np.ndarray:\n # Store shape\n h, w, c = image.shape\n\n img_np = image\n\n # Apply transformations\n image: torch.Tensor = self.transform(Image.fromarray(image))\n image = image.to(self.device)\n\n # Copy the numpy array because it's not writeable otherwise\n # Bring into shape [1,1,h,w]\n image.unsqueeze_(0)\n\n # Inference\n result = self.model.networks.g_b_to_a.forward(image).detach()\n\n # From [-1,1] to [0,256]\n result = tensor2im(result, to_rgb=False)\n\n # Resize to the size the input image has\n result = cv2.resize(result, dsize=(w, h), interpolation=cv2.INTER_LINEAR)\n\n if f_keep_pixels > 0:\n grey_img = cv2.cvtColor(img_np, cv2.COLOR_BGR2GRAY)\n colored_pxls = f_keep_pixels * np.ones((h, w))\n\n result = (1 - f_keep_pixels) * result + f_keep_pixels * grey_img\n\n if f_keep_colored_pixels > 0:\n grey_img = cv2.cvtColor(img_np, cv2.COLOR_BGR2GRAY)\n colored_pxls = f_keep_colored_pixels * np.ones((h, w))\n colored_pxls[img_np[:, :, 0] == img_np[:, :, 1]] = 0\n\n result = (\n np.ones_like(colored_pxls) - colored_pxls\n ) * result + colored_pxls * grey_img\n\n return result.astype(np.uint8)",
"def apply_image_function(\n self,\n image_function: Callable[[int, int, int, int], Tuple4IntType]\n ) -> 'BaseImage':\n w, h = self._surface.get_size()\n for x in range(w):\n for y in range(h):\n r, g, b, a = self._surface.get_at((x, y))\n r, g, b, a = image_function(r, g, b, a)\n r = int(max(0, min(r, 255)))\n g = int(max(0, min(g, 255)))\n b = int(max(0, min(b, 255)))\n a = int(max(0, min(a, 255)))\n # noinspection PyArgumentList\n self.set_at((x, y), pygame.Color(r, g, b, a))\n return self",
"def img_recolor(self, args, input_image_path):\n \n ec = encoder.Encoder(output_path=args.intermediate_representation, method=args.method,\n size=args.size, p=args.p, grid_size=args.grid_size, plot=args.plot, quantize=args.quantize)\n dc = decoder.Decoder(output_path=args.output_path, method=args.method, size=args.size, p=args.p, gpu_id=args.gpu_id, plot=args.plot)\n\n ec.encode(input_image_path)\n img_gray_name = ar_utils.gen_new_gray_filename(input_image_path)\n img_gray_path = os.path.join(args.intermediate_representation, img_gray_name)\n dc.decode(img_gray_path)\n\n if args.delete_gray and os.path.exists(img_gray_path):\n os.remove(img_gray_path)",
"def grey_to_rgb_imitation(img):\n return np.repeat(img[...,np.newaxis], 3, -1)",
"def prepare(img,\n resize=False, new_size=(64, 64),\n apply_contrast=False, contrast_channels=(0, 1, 2)\n ):\n new_img = crop(img)\n\n if resize:\n new_img = cv2.resize(new_img, new_size)\n\n if apply_contrast:\n new_img = increase_contrast(new_img, channels=contrast_channels)\n\n return new_img",
"def image(self, img):\n # determine our effective width/height, taking rotation into account\n width = self.width\n height = self.height\n if self.rotation in (1, 3):\n width, height = height, width\n\n if isinstance(self.format, (RGB565Format, RGB888Format)) and img.mode != \"RGB\":\n raise ValueError(\"Image must be in mode RGB.\")\n if isinstance(self.format, (MHMSBFormat, MVLSBFormat)) and img.mode != \"1\":\n raise ValueError(\"Image must be in mode 1.\")\n\n imwidth, imheight = img.size\n if imwidth != width or imheight != height:\n raise ValueError(\n f\"Image must be same dimensions as display ({width}x{height}).\"\n )\n # Grab all the pixels from the image, faster than getpixel.\n pixels = img.load()\n # Clear buffer\n for i in range(len(self.buf)): # pylint: disable=consider-using-enumerate\n self.buf[i] = 0\n # Iterate through the pixels\n for x in range(width): # yes this double loop is slow,\n for y in range(height): # but these displays are small!\n if img.mode == \"RGB\":\n self.pixel(x, y, pixels[(x, y)])\n elif pixels[(x, y)]:\n self.pixel(x, y, 1) # only write if pixel is true",
"def red_channel(img):\n\n red = np.zeros(img.shape,dtype=float)\n\n red[:,:,2] = np.copy(img[:,:,2])\n\n return red",
"def lab_to_rgb(img):\n new_img = np.zeros((256, 256, 3))\n for i in range(len(img)):\n for j in range(len(img[i])):\n pix = img[i, j]\n new_img[i, j] = [(pix[0] + 1) * 50, (pix[1] + 1) / 2 * 255 - 128, (pix[2] + 1) / 2 * 255 - 128]\n new_img = color.lab2rgb(new_img) * 255\n new_img = new_img.astype('uint8')\n return new_img",
"def _process_img_rgb(self, sensor_data):\n img = np.array(sensor_data.raw_data).reshape((self.img_y, self.img_x, 4))\n img = img[:, :, :3] # sensor is actualy rgba, we dont need alpha values\n self.rgb = img # need to scale rgb values to be {0,1}"
] | [
"0.61241204",
"0.59882116",
"0.59521466",
"0.5738032",
"0.5636173",
"0.56255084",
"0.5623958",
"0.559346",
"0.5581521",
"0.5567112",
"0.5559615",
"0.5529105",
"0.55037665",
"0.5473657",
"0.54565656",
"0.54229414",
"0.5404122",
"0.53800154",
"0.5358894",
"0.5328545",
"0.5306468",
"0.530261",
"0.5284066",
"0.5269414",
"0.52518004",
"0.52453935",
"0.52295357",
"0.5225805",
"0.52228504",
"0.5196514"
] | 0.6715946 | 0 |
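A standalone Pillow sketch of the same channel-splitting pattern as applied_to, with a made-up 12-tuple color matrix (each row computes R' = a*R + b*G + c*B + d):

from PIL import Image

img = Image.new('RGBA', (4, 4), (200, 100, 50, 255))  # hypothetical RGBA input
matrix = (0.5, 0.0, 0.0, 64.0,    # R' = 0.5*R + 64
          0.0, 0.5, 0.0, 32.0,    # G' = 0.5*G + 32
          0.0, 0.0, 0.5, 0.0)     # B' = 0.5*B
r, g, b, a = img.split()
rgb = Image.merge('RGB', (r, g, b)).convert('RGB', matrix)
out = Image.merge('RGBA', (*rgb.split(), a))  # alpha passes through unchanged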
Embed a 4-by-4 or smaller matrix in the upper-left of I_4. | def _embed44(matrix):
result = np.eye(4)
r, c = matrix.shape
result[:r, :c] = matrix
return result | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_augmented_matrix(self):\r\n for row in range(self.SIZE):\r\n self.matrix[row].append(self.result[row])",
"def matrix_3d_to_4x4(matrix: np.matrix) -> np.matrix:\n return np.matrix([\n [matrix.item(0, 0), matrix.item(0, 1), matrix.item(0, 2), 0],\n [matrix.item(1, 0), matrix.item(1, 1), matrix.item(1, 2), 0],\n [matrix.item(2, 0), matrix.item(2, 1), matrix.item(2, 2), 0],\n [0, 0, 0, 1]])",
"def getInverseMatrix(self) -> CMatrix4:\n ...",
"def _mat3(self):\n if self.frame.orientation == HillFrame.DEFAULT_ORIENTATION:\n return np.identity(3)\n else:\n return self.QSW2TNW",
"def Controlled(U):\n shp = U.shape\n new_ten = scipy.linalg.block_diag(np.eye(*shp), U)\n return new_ten.reshape(2, shp[0], 2, shp[1])",
"def row_matrix_col_4d(a, b, A):\n\n\treturn (a[0]*A[0][0]*b[0] + a[1]*A[1][0]*b[0] + a[2]*A[2][0]*b[0] + a[3]*A[3][0]*b[0] +\n\t a[0]*A[0][1]*b[1] + a[1]*A[1][1]*b[1] + a[2]*A[2][1]*b[1] + a[3]*A[3][1]*b[1] +\n\t a[0]*A[0][2]*b[2] + a[1]*A[1][2]*b[2] + a[2]*A[2][2]*b[2] + a[3]*A[3][2]*b[2] +\n\t a[0]*A[0][3]*b[3] + a[1]*A[1][3]*b[3] + a[2]*A[2][3]*b[3] + a[3]*A[3][3]*b[3])",
"def separate_augmented_matrix(self):\r\n for row in range(self.SIZE):\r\n self.result[row] = self.matrix[row][-1]\r\n self.matrix[row].pop()",
"def identMatrix(size):\n returnvalue = Matrix()\n for i in range(size):\n newrow = [0] * size\n newrow[i] = 1\n returnvalue.addRow(*newrow)\n return returnvalue",
"def conv3H4H(M):\n M = np.append(M.copy(), [[0, 0, 1]], 0) # add row\n return np.append(M, [[0], [0], [0], [0]], 1) # add column",
"def _prepare_outer_matrix(self):\n self._mat_plane = numpy.array([\n self._scaling[0], 0, 0, 0,\n 0, self._scaling[1], 0, 0,\n 0, 0, 1, 0,\n self.i_border[0], -self.i_border[1], 0, 1\n ], dtype=numpy.float32)",
"def identity_matrix():\r\n return numpy.identity(4)",
"def print_matrix_on_screen(matrix, width=5):\n for row in matrix:\n print(''.join(['{0:>{w}}'.format(item, w=width) for item in row]))",
"def identity_matrix():\n return numpy.identity(4)",
"def getMatrix(self) -> CMatrix4:\n ...",
"def d4out():\n\td4x.moveTo(d4x_out)\n\td4y.moveTo(d4y_out)",
"def warp(im, A, output_shape):\n invA = np.linalg.inv(A)\n warped = np.zeros(output_shape)\n for i in range(output_shape[0]):\n for j in range(output_shape[1]):\n ps = np.rint(np.dot(invA, [i, j, 1])).astype(int)\n if ps[0] >= 0 and ps[0] < output_shape[0] and ps[1] >=0 and ps[1] < output_shape[1]:\n warped[i][j] = im[ps[0]][ps[1]]\n return warped",
"def rellenarMatrix(self):\n for i in range(0, 26):\n self.matrixMAPA.append([])\n for j in range(0, 26):\n self.matrixMAPA[i].append((0, str(i)+\"-\"+str(j)))",
"def quatLeftMat(q):\n\ts = q[0]\n\tv = q[1:].reshape(-1,)\n\tL = np.zeros((4, 4))\n\tL[0, 0] = s\n\tL[0, 1:] = -v\n\tL[1:, 0] = v\n\tL[1:, 1:] = s*np.eye(3) + skewMat(v)\n\treturn L",
"def embed(self, ue):\n assert_condition(ue.size == self._mid_points, ValueError,\n \"Array to embed has the wrong size\")\n self[self.borders[0]:-self.borders[1]] = ue",
"def LotkaVolterra_InhibitMatrix(self):\n LV = -2 * (torch.ones(len(self.filler2index),\n len(self.filler2index)) - torch.eye(len(self.filler2index)))\n LV = LV.double()\n return LV",
"def T(self):\n # TODO - your code here\n transpose = []\n for col in range(self.w):\n new_row = []\n for row in range(self.h):\n new_row.append(self.g[row][col])\n transpose.append(new_row)\n return Matrix(transpose)\n # TODO - your code here",
"def get_stain_matrix(I):",
"def pivot(self, idx, is_row):\n mutable = [list(x) for x in self.faces]\n if is_row:\n # get the row, reverse it, and flip each element\n mutable[idx] = [0 if x == 1 else 1 for x in reversed(mutable[idx])]\n else:\n # find each corresponding column in each row, build the replacement, and sub it in\n tmp = [0 if x == 1 else 1 for x in reversed([row[idx] for row in mutable])]\n for i, row in enumerate(mutable):\n row[idx] = tmp[i]\n\n return Rubik2DBoard(self.rows, self.cols, data=tuple([tuple(x) for x in mutable]), prior_moves=self.moves + 1)",
"def define_orientation_matrix(self):\n from lmfit import Parameters\n p = Parameters()\n for i in range(3):\n for j in range(3):\n p.add('U%d%d' % (i, j), self.Umat[i, j])\n self.init_p = self.Umat\n return p",
"def inverse(self):\r\n \r\n Mi=mat4()\r\n d=self.determinant()\r\n for i in range(4):\r\n for j in range(4):\r\n sign=1-((i+j)%2)*2\r\n m3=self._submat(i,j)\r\n Mi[j,i]=sign*m3.determinant()/d\r\n return Mi",
"def identity(self):\r\n return mat4(1.0, 0.0, 0.0, 0.0,\r\n 0.0, 1.0, 0.0, 0.0,\r\n 0.0, 0.0, 1.0, 0.0,\r\n 0.0, 0.0, 0.0, 1.0)",
"def getTransposeMatrix(self) -> CMatrix4:\n ...",
"def d4in():\n\td4x.moveTo(d4x_in)\n\td4y.moveTo(d4y_in)",
"def twenty_four_cell(self):\n verts = []\n q12 = QQ(1)/2\n base = [q12,q12,q12,q12]\n for i in range(2):\n for j in range(2):\n for k in range(2):\n for l in range(2):\n verts.append([x for x in base])\n base[3] = base[3]*(-1)\n base[2] = base[2]*(-1)\n base[1] = base[1]*(-1)\n base[0] = base[0]*(-1)\n verts = verts + permutations([0,0,0,1])\n verts = verts + permutations([0,0,0,-1])\n return Polyhedron(vertices = verts)",
"def restore_orientation_matrix(self):\n self.Umat = self.init_p"
] | [
"0.56006694",
"0.553187",
"0.5439319",
"0.54256284",
"0.5364134",
"0.5332387",
"0.53140825",
"0.5310225",
"0.53080255",
"0.52915466",
"0.5281834",
"0.52639854",
"0.52455604",
"0.5245288",
"0.51908726",
"0.5160496",
"0.5122607",
"0.5120262",
"0.5118664",
"0.51174915",
"0.51112914",
"0.51046187",
"0.5022268",
"0.5010483",
"0.49915963",
"0.49694934",
"0.49663082",
"0.4940161",
"0.49123746",
"0.49054092"
] | 0.67249334 | 0 |
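The _embed44 record above is small enough to sanity-check directly. A minimal usage sketch, assuming NumPy; the function body is restated verbatim from the record so the snippet runs standalone:

    import numpy as np

    def _embed44(matrix):
        # Embed a 4-by-4 or smaller matrix in the upper-left of I_4.
        result = np.eye(4)
        r, c = matrix.shape
        result[:r, :c] = matrix
        return result

    # Embedding a 2x2 rotation block leaves the rest of the identity intact.
    theta = np.pi / 2
    rot2 = np.array([[np.cos(theta), -np.sin(theta)],
                     [np.sin(theta),  np.cos(theta)]])
    embedded = _embed44(rot2)
    assert embedded.shape == (4, 4)
    assert np.allclose(embedded[2:, 2:], np.eye(2))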
This will first try to load the specified module from the pyrominfo package using the current module search path. If it can't be found, then the parent directory is added to the module search path and the import attempt is repeated. | def loadModule(mod):
try:
# from pyrominfo import gameboy, etc
pyrominfo = __import__("pyrominfo", globals(), locals(), [mod])
except ImportError:
import os
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.sys.path.insert(0, parentdir)
pyrominfo = __import__("pyrominfo", globals(), locals(), [mod])
try:
return getattr(pyrominfo, mod)
except AttributeError:
raise ImportError("testutils.loadModule() can't find module %s in pyrominfo package" % mod) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _find_module(self, name, path, parent=None):\n\n if parent is not None:\n # assert path is not None\n fullname = parent.identifier + '.' + name\n else:\n fullname = name\n\n node = self.findNode(fullname)\n if node is not None:\n self.msg(3, \"find_module: already included?\", node)\n raise ImportError(name)\n\n if path is None:\n if name in sys.builtin_module_names:\n return (None, BUILTIN_MODULE)\n\n path = self.path\n\n return self._find_module_path(fullname, name, path)",
"def load_modules_manually():\n #cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0]))\n cmd_folder = '../myutils/'\n if cmd_folder not in sys.path:\n sys.path.insert(0, cmd_folder)\n #print sys.path",
"def lookupmodule(self, filename):\n if os.path.isabs(filename) and os.path.exists(filename):\n return filename\n f = os.path.join(sys.path[0], filename)\n if os.path.exists(f) and self.canonic(f) == self.mainpyfile:\n return f\n root, ext = os.path.splitext(filename)\n if ext == '':\n filename = filename + '.py'\n if os.path.isabs(filename):\n return filename\n for dirname in sys.path:\n while os.path.islink(dirname):\n dirname = os.readlink(dirname)\n fullname = os.path.join(dirname, filename)\n if os.path.exists(fullname):\n return fullname\n return None",
"def _import_from(mod, path, mod_dir=None):\n\n if mod_dir is None:\n mod_dir = mod\n\n if not os.path.exists(path):\n return None\n\n if not os.path.exists(os.path.join(path, mod_dir)):\n return None\n\n try:\n mod_info = imp.find_module(mod_dir, [path])\n return imp.load_module(mod, *mod_info)\n except ImportError:\n return None",
"def findModule(name):",
"def load_module(name_or_path):\n if os.path.exists(name_or_path):\n path = name_or_path.rstrip(\"/\")\n modname = os.path.splitext(os.path.basename(path))[0]\n if os.path.isdir(path):\n path = os.path.join(path, \"__init__.py\")\n spec = importlib.util.spec_from_file_location(modname, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n else:\n mod = importlib.import_module(name_or_path)\n try:\n path = mod.__path__[0]\n except AttributeError:\n path = mod.__file__\n return mod, path",
"def lookup_module(filename):\r\n\r\n # stolen from pdb\r\n import os\r\n import sys\r\n\r\n if os.path.isabs(filename) and os.path.exists(filename):\r\n return filename\r\n f = os.path.join(sys.path[0], filename)\r\n if os.path.exists(f): # and self.canonic(f) == self.mainpyfile:\r\n return f\r\n root, ext = os.path.splitext(filename)\r\n if ext == '':\r\n filename = filename + '.py'\r\n if os.path.isabs(filename):\r\n return filename\r\n for dirname in sys.path:\r\n while os.path.islink(dirname):\r\n dirname = os.readlink(dirname)\r\n fullname = os.path.join(dirname, filename)\r\n if os.path.exists(fullname):\r\n return fullname\r\n return None",
"def _import(module_name, dir_name):\n\n # assign module a name that's not likely to conflict\n safe_name = 'confab.data.' + module_name\n\n # check if module is already loaded\n existing = sys.modules.get(safe_name)\n if existing:\n return existing\n\n # try to load module\n module_info = imp.find_module(module_name, [dir_name])\n module = imp.load_module(safe_name, *module_info)\n return module",
"def _safe_import_module(\n self, module_partname, module_name, parent_module):\n self.msgin(3, \"safe_import_module\", module_partname, module_name, parent_module)\n\n # If this module has *NOT* already been imported, do so.\n module = self.findNode(module_name)\n if module is None:\n # List of the absolute paths of all directories to be searched for\n # this module. This effectively defaults to \"sys.path\".\n search_dirs = None\n\n # If this module has a parent package...\n if parent_module is not None:\n # ...with a list of the absolute paths of all directories\n # comprising this package, prefer that to \"sys.path\".\n if parent_module.packagepath is not None:\n search_dirs = parent_module.packagepath\n # Else, something is horribly wrong. Return emptiness.\n else:\n self.msgout(3, \"safe_import_module -> None (parent_parent.packagepath is None)\")\n return None\n\n try:\n pathname, loader = self._find_module(\n module_partname, search_dirs, parent_module)\n except ImportError as exc:\n self.msgout(3, \"safe_import_module -> None (%r)\" % exc)\n return None\n\n module = self._load_module(module_name, pathname, loader)\n\n # If this is a submodule rather than top-level module...\n if parent_module is not None:\n self.msg(4, \"safe_import_module create reference\", module, \"->\", parent_module)\n\n # Add an edge from this submodule to its parent module.\n self._updateReference(\n module, parent_module, edge_data=DependencyInfo(\n conditional=False,\n fromlist=False,\n function=False,\n tryexcept=False,\n ))\n\n # Add this submodule to its parent module.\n parent_module.add_submodule(module_partname, module)\n\n # Return this module.\n self.msgout(3, \"safe_import_module ->\", module)\n return module",
"def load_module(path: os.PathLike):\n path = Path(path)\n pwd = Path(os.getcwd())\n os.chdir(path.parent)\n try:\n mod = import_module(path.stem)\n except ModuleNotFoundError as err:\n raise err\n finally:\n os.chdir(pwd)\n return mod",
"def _look_in_package(tree: dict, module_path: str, name: str, level: Optional[int] = None) -> Union[str, None]:\n parent_path = os.path.dirname(module_path)\n if level is not None:\n for _ in range(level - 1):\n parent_path = os.path.dirname(parent_path)\n parent = find_tree(tree, lambda x, p: x[\"path\"] in [p, os.path.join(p, \"__init__.py\")], args=(parent_path,))\n if parent:\n if parent[\"fullname\"] in [name, \"{}.__init__\".format(name)]:\n return parent[\"path\"]\n for child in parent[\"children\"].values():\n if child[\"name\"] == name:\n return child[\"path\"]\n target = find_tree(tree, lambda x, f: x[\"fullname\"] == f, args=(\"{}.{}\".format(parent[\"fullname\"], name),))\n if target:\n return target[\"path\"]\n return None",
"def _load_module_recursive(self, dir) :\t\n\t\tfor filepath in os.listdir(dir) :\n\t\t\tfullpath = os.path.join(dir, filepath)\n\n\t\t\tif os.path.isdir(fullpath) :\n\t\t\t\tself._load_module_recursive(fullpath)\n\n\t\t\telif os.path.splitext(filepath)[1] == '.py' :\n\t\t\t\tutils.load_module(fullpath, self.settings.ROOT_PATH)",
"def find_module_file(base_directory, path):\n return os.path.join(base_directory, path)",
"def _import_from(mod, path, mod_dir=None):\n\n if mod in sys.modules:\n return sys.modules[mod]\n\n if mod_dir is None:\n full_mod = mod\n else:\n full_mod = mod_dir.replace(os.sep, '.')\n\n if mod_dir is None:\n mod_dir = mod.replace('.', os.sep)\n\n if not os.path.exists(path):\n return None\n\n source_path = os.path.join(path, mod_dir, '__init__.py')\n if not os.path.exists(source_path):\n source_path = os.path.join(path, mod_dir + '.py')\n\n if not os.path.exists(source_path):\n return None\n\n if os.sep in mod_dir:\n append, mod_dir = mod_dir.rsplit(os.sep, 1)\n path = os.path.join(path, append)\n\n try:\n if sys.version_info < (3, 5):\n mod_info = imp.find_module(mod_dir, [path])\n return imp.load_module(mod, *mod_info)\n\n else:\n package = mod.split('.', 1)[0]\n package_dir = full_mod.split('.', 1)[0]\n package_path = os.path.join(path, package_dir)\n CUSTOM_FINDER.add_module(package, package_path)\n\n return importlib.import_module(mod)\n\n except ImportError:\n return None",
"def _find_module_path(self, fullname, module_name, search_dirs):\n self.msgin(4, \"_find_module_path <-\", fullname, search_dirs)\n\n # Top-level 2-tuple to be returned.\n path_data = None\n\n # List of the absolute paths of all directories comprising the\n # namespace package to which this module belongs if any.\n namespace_dirs = []\n\n try:\n for search_dir in search_dirs:\n # PEP 302-compliant importer making loaders for this directory.\n importer = pkgutil.get_importer(search_dir)\n\n # If this directory is not importable, continue.\n if importer is None:\n # self.msg(4, \"_find_module_path importer not found\", search_dir)\n continue\n\n # Get the PEP 302-compliant loader object loading this module.\n #\n # If this importer defines the PEP 302-compliant find_loader()\n # method, prefer that.\n if hasattr(importer, 'find_loader'):\n loader, loader_namespace_dirs = importer.find_loader(\n module_name)\n namespace_dirs.extend(loader_namespace_dirs)\n # Else if this importer defines the Python 2-specific\n # find_module() method, fall back to that. Despite the method\n # name, this method returns a loader rather than a module.\n elif hasattr(importer, 'find_module'):\n loader = importer.find_module(module_name)\n # Else, raise an exception.\n else:\n raise ImportError(\n \"Module %r importer %r loader unobtainable\" % (module_name, importer))\n\n # If this module is not loadable from this directory, continue.\n if loader is None:\n # self.msg(4, \"_find_module_path loader not found\", search_dir)\n continue\n\n # Absolute path of this module. If this module resides in a\n # compressed archive, this is the absolute path of this module\n # after extracting this module from that archive and hence\n # should not exist; else, this path should typically exist.\n pathname = None\n\n # If this loader defines the PEP 302-compliant get_filename()\n # method, preferably call that method first. Most if not all\n # loaders (including zipimporter objects) define this method.\n if hasattr(loader, 'get_filename'):\n pathname = loader.get_filename(module_name)\n # Else if this loader provides a \"path\" attribute, defer to that.\n elif hasattr(loader, 'path'):\n pathname = loader.path\n # Else, raise an exception.\n else:\n raise ImportError(\n \"Module %r loader %r path unobtainable\" % (module_name, loader))\n\n # If no path was found, this is probably a namespace package. In\n # such case, continue collecting namespace directories.\n if pathname is None:\n self.msg(4, \"_find_module_path path not found\", pathname)\n continue\n\n # Return such metadata.\n path_data = (pathname, loader)\n break\n # Else if this is a namespace package, return such metadata.\n else:\n if namespace_dirs:\n path_data = (namespace_dirs[0],\n NAMESPACE_PACKAGE(namespace_dirs))\n except UnicodeDecodeError as exc:\n self.msgout(1, \"_find_module_path -> unicode error\", exc)\n # Ensure that exceptions are logged, as this function is typically\n # called by the import_module() method which squelches ImportErrors.\n except Exception as exc:\n self.msgout(4, \"_find_module_path -> exception\", exc)\n raise\n\n # If this module was not found, raise an exception.\n self.msgout(4, \"_find_module_path ->\", path_data)\n if path_data is None:\n raise ImportError(\"No module named \" + repr(module_name))\n\n return path_data",
"def load_module(self, fqn):\n trace(\"load_module\", fqn)\n trace(\"sys.modules\", sys.modules)\n p = lookupWithMapper(self.mapper, fqn)\n trace(\"load_module\", fqn, \"done\", id(p))\n\n if fqn in _sysModulesSpecialCases:\n # This module didn't have access to our isolated sys.modules when it\n # did its sys.modules modification. Replicate it here.\n for submoduleName in _sysModulesSpecialCases[fqn]:\n subfqn = '.'.join([fqn, submoduleName])\n sys.modules[subfqn] = getattr(p, submoduleName, None)\n return p",
"def ppimport(name):\n global _ppimport_is_enabled\n\n level = 1\n parent_frame = p_frame = _get_frame(level)\n while not p_frame.f_locals.has_key('__name__'):\n level = level + 1\n p_frame = _get_frame(level)\n\n p_name = p_frame.f_locals['__name__']\n if p_name=='__main__':\n p_dir = ''\n fullname = name\n elif p_frame.f_locals.has_key('__path__'):\n # python package\n p_path = p_frame.f_locals['__path__']\n p_dir = p_path[0]\n fullname = p_name + '.' + name\n else:\n # python module\n p_file = p_frame.f_locals['__file__']\n p_dir = os.path.dirname(p_file)\n fullname = p_name + '.' + name\n\n # module may be imported already\n module = sys.modules.get(fullname)\n if module is not None:\n if _ppimport_is_enabled or isinstance(module, types.ModuleType):\n return module\n return module._ppimport_importer()\n\n so_ext = _get_so_ext()\n py_exts = ('.py','.pyc','.pyo')\n so_exts = (so_ext,'module'+so_ext)\n\n for d,n,fn,e in [\\\n # name is local python module or local extension module\n (p_dir, name, fullname, py_exts+so_exts),\n # name is local package\n (os.path.join(p_dir, name), '__init__', fullname, py_exts),\n # name is package in parent directory (scipy specific)\n (os.path.join(os.path.dirname(p_dir), name), '__init__', name, py_exts),\n ]:\n location = _is_local_module(d, n, e)\n if location is not None:\n fullname = fn\n break\n\n if location is None:\n # name is to be looked in python sys.path.\n fullname = name\n location = 'sys.path'\n\n # Try once more if module is imported.\n # This covers the case when importing from python module\n module = sys.modules.get(fullname)\n\n if module is not None:\n if _ppimport_is_enabled or isinstance(module,types.ModuleType):\n return module\n return module._ppimport_importer()\n # It is OK if name does not exists. The ImportError is\n # postponed until trying to use the module.\n\n loader = _ModuleLoader(fullname,location,p_frame=parent_frame)\n if _ppimport_is_enabled:\n return loader\n\n return loader._ppimport_importer()",
"def _load_module(modname):\n if modname in sys.modules:\n raise ImportError(\"Stock module %r already loaded\" % modname)\n searchpath = [HERE]\n if \"DEFUSED_EXPAT\" in os.environ:\n # for unit testing\n searchpath.extend(os.environ[\"DEFUSED_EXPAT\"].split(os.pathsep))\n fh = None\n try:\n fh, filename, description = imp.find_module(modname, searchpath)\n mod = imp.load_module(modname, fh, filename, description)\n finally:\n if fh is not None:\n fh.close()\n modpath = getattr(sys.modules[modname], \"__file__\", \"\")\n if not modpath.startswith(HERE):\n raise ValueError(\"Unpatched module %r loaded (%s != %s)\" %\n (mod, moddir, HERE))\n return mod",
"def load_from_module_path(self, filename: str) -> None:\n # pylint: disable=import-outside-toplevel\n import importlib.util\n spec = importlib.util.spec_from_file_location(\"base_config\", filename)\n module = importlib.util.module_from_spec(spec)\n if spec.loader is not None:\n spec.loader.exec_module(module)\n else:\n raise Exception(\"Could not get module loader from spec\")\n self.load_from_module(module)",
"def find_main_module(self):\n\n if self.type == 'passthrough':\n return None\n directory, basename = os.path.split(self.main_module)\n module, ext = os.path.splitext(basename)\n if ext:\n # if the module include the extension, just return its absolute\n # path\n return os.path.join(self.code_dir, self.main_module)\n\n # Otherwise, try to find the proper module, by assuming that there\n # is only one file with such name. Note that this may fail if\n # there are other files such as byte-compiled binaries, etc.\n found = glob.glob(os.path.join(self.code_dir, directory, module+'.*'))\n if not found:\n raise APIException('module not found: {}'\n .format(self.main_module), 400)\n\n return found[0]",
"def _locate(path: str) -> Any:\n if path == \"\":\n raise ImportError(\"Empty path\")\n from importlib import import_module\n from types import ModuleType\n\n parts = [part for part in path.split(\".\")]\n for part in parts:\n if not len(part):\n raise ValueError(\n f\"Error loading '{path}': invalid dotstring.\"\n + \"\\nRelative imports are not supported.\"\n )\n assert len(parts) > 0\n part0 = parts[0]\n try:\n obj = import_module(part0)\n except Exception as exc_import:\n raise ImportError(\n f\"Error loading '{path}':\\n{repr(exc_import)}\"\n + f\"\\nAre you sure that module '{part0}' is installed?\"\n ) from exc_import\n for m in range(1, len(parts)):\n part = parts[m]\n try:\n obj = getattr(obj, part)\n except AttributeError as exc_attr:\n parent_dotpath = \".\".join(parts[:m])\n if isinstance(obj, ModuleType):\n mod = \".\".join(parts[: m + 1])\n try:\n obj = import_module(mod)\n continue\n except ModuleNotFoundError as exc_import:\n raise ImportError(\n f\"Error loading '{path}':\\n{repr(exc_import)}\"\n + f\"\\nAre you sure that '{part}' is importable from module '{parent_dotpath}'?\"\n ) from exc_import\n except Exception as exc_import:\n raise ImportError(\n f\"Error loading '{path}':\\n{repr(exc_import)}\"\n ) from exc_import\n raise ImportError(\n f\"Error loading '{path}':\\n{repr(exc_attr)}\"\n + f\"\\nAre you sure that '{part}' is an attribute of '{parent_dotpath}'?\"\n ) from exc_attr\n return obj",
"def loadModule(path, doReload=False):\n relPath = Files.relName(path)\n context = Context.getContext()\n parentMod = context.package\n if parentMod is not None:\n modName = \"%s.%s\" % (parentMod.__name__,\n relPath.replace(\"/\", \".\")[:-3])\n else:\n modName = \"%s\" % (relPath.replace(\"/\", \".\")[:-3])\n if not doReload and path in _loadedModules:\n return _loadedModules[path]\n\n ns = {}\n here = os.getcwd()\n subDir = os.path.dirname(path)\n if subDir:\n os.chdir(subDir)\n\n global _loading, _curScriptPackage\n try:\n try:\n try:\n _loading = os.path.basename(path)\n _curScriptPackage = parentMod\n mod = imp.load_source(modName, os.path.basename(path))\n except Unsupported as exc:\n return\n except Exception as exc:\n print(formatImportFailure(modName, exc))\n print(\"Hmm\", exc)\n raise\n except Unsupported:\n return\n finally:\n os.chdir(here)\n return mod",
"def import_from_cwd(module, imp=..., package=...):\n ...",
"def reload_from_cwd(module, reloader=...):\n ...",
"def _loadManifest(self, pkg):\r\n if pkg in self._packages:\r\n return\r\n\r\n sys.path = self._generatePythonPath(pkg) + sys.path",
"def relative_import(path):\n caller_path = os.path.abspath(inspect.getfile(inspect.currentframe().f_back))\n\n script_path = os.path.abspath(os.path.join(os.path.dirname(caller_path), path))\n script_name = os.path.splitext(os.path.basename(script_path))[0]\n\n sys.path.append(os.path.dirname(script_path))\n try:\n module = importlib.import_module(script_name)\n importlib.reload(module)\n return module\n finally:\n del sys.path[-1]",
"def load_module(module_name, file_name):\n from importlib.machinery import SourceFileLoader\n home_dir = os.path.expanduser(\"~\")\n valid_paths = [\n os.path.join(home_dir, \"Google Drive\"),\n os.path.join(home_dir, \"GoogleDrive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"Google Drive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"GoogleDrive\"),\n os.path.join(\"C:/\", \"GoogleDrive\"),\n os.path.join(\"C:/\", \"Google Drive\"),\n os.path.join(\"D:/\", \"GoogleDrive\"),\n os.path.join(\"D:/\", \"Google Drive\"),\n ]\n\n drive_path = None\n for path in valid_paths:\n if os.path.isdir(path):\n drive_path = path\n break\n\n if drive_path is None:\n logger_lib = None\n print(\"Logger library not found in shared repo.\", flush = True)\n #raise Exception(\"Couldn't find google drive folder!\")\n else: \n utils_path = os.path.join(drive_path, \"_pyutils\")\n print(\"Loading [{}] package...\".format(os.path.join(utils_path,file_name)),flush = True)\n logger_lib = SourceFileLoader(module_name, os.path.join(utils_path, file_name)).load_module()\n print(\"Done loading [{}] package.\".format(os.path.join(utils_path,file_name)),flush = True)\n\n return logger_lib",
"def load_module(module_name, file_name):\n from importlib.machinery import SourceFileLoader\n home_dir = os.path.expanduser(\"~\")\n valid_paths = [\n os.path.join(home_dir, \"Google Drive\"),\n os.path.join(home_dir, \"GoogleDrive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"Google Drive\"),\n os.path.join(os.path.join(home_dir, \"Desktop\"), \"GoogleDrive\"),\n os.path.join(\"C:/\", \"GoogleDrive\"),\n os.path.join(\"C:/\", \"Google Drive\"),\n os.path.join(\"D:/\", \"GoogleDrive\"),\n os.path.join(\"D:/\", \"Google Drive\"),\n ]\n\n drive_path = None\n for path in valid_paths:\n if os.path.isdir(path):\n drive_path = path\n break\n\n if drive_path is None:\n logger_lib = None\n print(\"Logger library not found in shared repo.\", flush = True)\n #raise Exception(\"Couldn't find google drive folder!\")\n else: \n utils_path = os.path.join(drive_path, \"_pyutils\")\n print(\"Loading [{}] package...\".format(os.path.join(utils_path,file_name)),flush = True)\n logger_lib = SourceFileLoader(module_name, os.path.join(utils_path, file_name)).load_module()\n print(\"Done loading [{}] package.\".format(os.path.join(utils_path,file_name)),flush = True)\n\n return logger_lib",
"def fix_import_path(): \n import sys, os, struct\n bit_size = struct.calcsize(\"P\") * 8\n ARCH = '/x86' if bit_size == 32 else '/x64'\n LEAP_PATH = os.path.dirname(__file__) + '/leap'\n sys.path.extend([LEAP_PATH, LEAP_PATH + ARCH])",
"def find_module(self, abs_name, path=None):\n package_name = abs_name.split(\".\")[0]\n\n last_name = abs_name.split(\".\")[-1]\n if last_name in sys.modules:\n return None\n\n try:\n # means it can already be imported, no work to be done here\n imp.find_module(abs_name)\n\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # see the comment in the docstring\n return None\n except ImportError as e:\n pass\n\n if package_name == \"talus\" and self._module_in_git(abs_name):\n self.download_module(abs_name)\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # see the comment in the docstring\n return None\n\n if package_name in self.cache[\"packages\"] and package_name not in sys.modules:\n self.install_package_from_talus(package_name)\n return None\n\n # we NEED to have the 2nd check here or else it will keep downloading\n # the same package over and over\n if package_name in self.cache[\"pypi\"] and package_name not in sys.modules:\n self.install_cached_package(package_name)\n return None\n\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # THIS IS IMPORTANT, YES WE WANT TO RETURN NONE!!!\n # see the comment in the docstring\n return None"
] | [
"0.6502171",
"0.6038095",
"0.5900002",
"0.58177245",
"0.57946616",
"0.57328486",
"0.5724542",
"0.57179606",
"0.5698969",
"0.5637101",
"0.5621891",
"0.5580989",
"0.5569779",
"0.55592674",
"0.55570364",
"0.5522914",
"0.5515631",
"0.5507379",
"0.5486642",
"0.54800147",
"0.54796576",
"0.5466692",
"0.5428221",
"0.54193324",
"0.5404159",
"0.5379558",
"0.53730786",
"0.53730786",
"0.53685385",
"0.53676355"
] | 0.7147778 | 0 |
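The loadModule record above captures a common import-fallback pattern: try the package on the current search path, and only on failure widen sys.path to the parent directory and retry. A generic stdlib-only sketch of the same pattern, with a hypothetical package name standing in for pyrominfo:

    import importlib
    import os
    import sys

    def load_with_parent_fallback(package, module):
        dotted = "%s.%s" % (package, module)
        try:
            # First attempt: rely on the current module search path.
            return importlib.import_module(dotted)
        except ImportError:
            # Fallback: prepend the grandparent directory of this file,
            # mirroring os.path.dirname(os.path.dirname(...)) in the record.
            parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            sys.path.insert(0, parent)
            return importlib.import_module(dotted)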
Attach disk to VM by reconfiguration. | def attach_disk_to_vm(self, vm_ref, instance_name,
adapter_type, disk_type, vmdk_path=None,
disk_size=None, linked_clone=False,
controller_key=None, unit_number=None,
device_name=None):
client_factory = self._session._get_vim().client.factory
vmdk_attach_config_spec = vm_util.get_vmdk_attach_config_spec(
client_factory, adapter_type, disk_type,
vmdk_path, disk_size, linked_clone,
controller_key, unit_number, device_name)
LOG.debug(_("Reconfiguring VM instance %(instance_name)s to attach "
"disk %(vmdk_path)s or device %(device_name)s with type "
"%(disk_type)s") % locals())
reconfig_task = self._session._call_method(
self._session._get_vim(),
"ReconfigVM_Task", vm_ref,
spec=vmdk_attach_config_spec)
self._session._wait_for_task(instance_name, reconfig_task)
LOG.debug(_("Reconfigured VM instance %(instance_name)s to attach "
"disk %(vmdk_path)s or device %(device_name)s with type "
"%(disk_type)s") % locals()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def AttachDisk(self, disk: 'AZComputeDisk') -> None:\n vm = self.compute_client.virtual_machines.get(\n self.resource_group_name, self.name)\n data_disks = vm.storage_profile.data_disks\n # ID to assign to the data disk to attach\n lun = 0 if len(data_disks) == 0 else len(data_disks) + 1\n\n update_data = {\n 'lun': lun,\n 'name': disk.name,\n 'create_option': models.DiskCreateOption.attach,\n 'managed_disk': {'id': disk.resource_id}\n }\n\n data_disks.append(update_data)\n\n try:\n request = self.compute_client.virtual_machines.begin_update(\n self.resource_group_name, self.name, vm)\n while not request.done():\n sleep(5) # Wait 5 seconds before checking vm status again\n except azure_exceptions.CloudError as exception:\n raise RuntimeError(\n 'Could not attach disk {0:s} to instance {1:s}: {2:s}'.format(\n disk.name, self.name, str(exception))) from exception",
"def _AttachDisk(self, idx, params, _):\n uuid = params.get(\"uuid\", None)\n name = params.get(constants.IDISK_NAME, None)\n\n disk = self.GenericGetDiskInfo(uuid, name)\n\n # Rename disk before attaching (if disk is filebased)\n if disk.dev_type in constants.DTS_INSTANCE_DEPENDENT_PATH:\n # Add disk size/mode, else GenerateDiskTemplate will not work.\n params[constants.IDISK_SIZE] = disk.size\n params[constants.IDISK_MODE] = str(disk.mode)\n dummy_disk = self._GenerateDiskTemplateWrapper(idx, disk.dev_type, params)\n new_logical_id = dummy_disk.logical_id\n result = self.rpc.call_blockdev_rename(self.instance.primary_node,\n [(disk, new_logical_id)])\n result.Raise(\"Failed before attach\")\n self.cfg.SetDiskLogicalID(disk.uuid, new_logical_id)\n disk.logical_id = new_logical_id\n\n # Attach disk to instance\n self.cfg.AttachInstanceDisk(self.instance.uuid, disk.uuid, idx)\n\n # re-read the instance from the configuration\n self.instance = self.cfg.GetInstanceInfo(self.instance.uuid)\n\n changes = [\n (\"disk/%d\" % idx,\n \"attach:size=%s,mode=%s\" % (disk.size, disk.mode)),\n ]\n\n disks_ok, _, payloads = AssembleInstanceDisks(self, self.instance,\n disks=[disk])\n if not disks_ok:\n changes.append((\"disk/%d\" % idx, \"assemble:failed\"))\n return disk, changes\n\n if self.op.hotplug:\n _, link_name, uri = payloads[0]\n msg = self._HotplugDevice(constants.HOTPLUG_ACTION_ADD,\n constants.HOTPLUG_TARGET_DISK,\n disk, (link_name, uri), idx)\n changes.append((\"disk/%d\" % idx, msg))\n\n return (disk, changes)",
"def attachDiskToMinipad(self , disk):\n return",
"def attach_disk(self, instance, disk, zone):\n return self.call_api(\n '/zones/%s/instances/%s/attachDisk' % (zone, instance),\n method='POST',\n payload={\n 'autoDelete': True,\n 'deviceName': disk,\n 'source': 'projects/%s/zones/%s/disks/%s' % (\n self.project_id, zone, disk),\n },\n )",
"def add_disk(self, vm, size, disk_type='thin'):\n logger.info(f\"Adding disk to {vm.config.name}\")\n spec = vim.vm.ConfigSpec()\n controller = self.get_controller_for_adding_disk(vm)\n unit_number = self.get_unit_number(vm)\n logger.info(f\"Unit number for new disk: {unit_number}\")\n\n device_changes = []\n new_disk_kb = int(size) * GB2KB\n disk_spec = vim.vm.device.VirtualDeviceSpec()\n disk_spec.fileOperation = \"create\"\n disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add\n disk_spec.device = vim.vm.device.VirtualDisk()\n disk_spec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()\n if disk_type == VM_DISK_TYPE:\n disk_spec.device.backing.thinProvisioned = True\n disk_spec.device.backing.diskMode = VM_DISK_MODE\n disk_spec.device.unitNumber = unit_number\n disk_spec.device.capacityInKB = new_disk_kb\n disk_spec.device.controllerKey = controller.key\n device_changes.append(disk_spec)\n spec.deviceChange = device_changes\n WaitForTask(vm.ReconfigVM_Task(spec=spec))\n logger.info(f\"{size}GB disk added successfully to {vm.config.name}\")",
"def disk(self, disk):\n self._context[\"disk\"] = disk",
"def disk_config(self, disk_config):\n\n self._disk_config = disk_config",
"def detachDiskFromMinipad(self , disk):\n return",
"def create_disk(self, disk):\n spec = {\n 'new_vmdk': {\n # Convert from mebibytes to bytes because VMDK is specified in bytes\n 'capacity': 1024\n * 1024\n * disk.size,\n }\n }\n\n try:\n backend_id = self.client.create_disk(disk.vm.backend_id, spec)\n except VMwareError as e:\n raise VMwareBackendError(e)\n else:\n disk.backend_id = backend_id\n disk.save(update_fields=['backend_id'])\n signals.vm_updated.send(self.__class__, vm=disk.vm)\n return disk",
"def set_virtual_disk_storage_profile(vm, hardware_device, profile):\n\n spec = vim.vm.ConfigSpec()\n device_specs = []\n profile_specs = []\n profile_spec = vim.vm.DefinedProfileSpec()\n profile_spec.profileId = profile.profileId.uniqueId\n profile_specs.append(profile_spec)\n\n device_spec = vim.vm.device.VirtualDeviceSpec()\n device_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit\n device_spec.device = hardware_device\n device_spec.profile = profile_specs\n device_specs.append(device_spec)\n spec.deviceChange = device_specs\n vm.ReconfigVM_Task(spec)",
"def attachDisk(\n positive, alias, vm_name, active=True, read_only=False, disk_id=None,\n interface='virtio', bootable=None,\n):\n if disk_id:\n name = disk_id\n attribute = 'id'\n else:\n name = alias\n attribute = 'name'\n disk_object = get_disk_obj(name, attribute)\n # This is only needed because for legacy reason we also want to modify\n # the read_only property when we attach a disk\n # Also for attaching a disk the active parameter is pass inside the disk\n # object\n updated_disk = _prepareDiskObject(\n id=disk_object.get_id(), read_only=read_only\n )\n vm_disks = getObjDisks(vm_name)\n logger.info(\"Attaching disk %s to vm %s\", alias, vm_name)\n disk_attachment = prepare_disk_attachment_object(\n updated_disk.get_id(), interface=interface, bootable=bootable,\n disk=updated_disk, active=active\n )\n return DISK_ATTACHMENTS_API.create(\n disk_attachment, positive, collection=vm_disks\n )[1]",
"def attach_volume(self, context, connection_info, instance, mountpoint,\n disk_bus=None, device_type=None, encryption=None):",
"def add_vm_with_disk(request, storage):\n self = request.node.cls\n\n def finalizer():\n assert ll_vms.safely_remove_vms(\n [self.test_vm_name]\n ), \"Unable to remove VM %s\" % self.test_vm_name\n\n request.addfinalizer(finalizer)\n self.vm_names = list()\n self.test_vm_name = storage_helpers.create_unique_object_name(\n self.__name__, config.OBJECT_TYPE_VM\n )\n vm_args = config.create_vm_args.copy()\n vm_args['vmName'] = self.test_vm_name\n vm_args['storageDomainName'] = self.storage_domain\n\n testflow.setup(\"Creating VM %s\", self.test_vm_name)\n assert storage_helpers.create_vm_or_clone(**vm_args), (\n \"Failed to create VM %s\" % self.test_vm_name\n )\n self.vm_names.append(self.test_vm_name)\n\n testflow.setup(\n \"Attaching disk %s to VM %s\", self.disk_name, self.test_vm_name\n )\n assert ll_disks.attachDisk(True, self.disk_name, self.test_vm_name), (\n \"Failed to attach disk %s to VM %s\" %\n (self.disk_name, self.test_vm_name)\n )\n assert ll_disks.wait_for_disks_status(self.disk_name), (\n \"Disk %s is not in the expected state 'OK\" % self.disk_name\n )",
"def attach_volume(self, instance_name, device_path, mountpoint):\n return True",
"def test_06_migrate_vm_live_attach_disk(self):\n \n global vm\n global data_disk_1\n data_disk_1 = self.helper.create_custom_disk(\n self.apiclient,\n {\"diskname\":\"StorPoolDisk\" },\n zoneid=self.zone.id,\n size = 5,\n miniops = 2000,\n maxiops = 5000,\n account=self.account.name,\n domainid=self.account.domainid,\n diskofferingid=self.disk_offerings.id,\n )\n\n self.debug(\"Created volume with ID: %s\" % data_disk_1.id)\n\n self.virtual_machine_live_migration_1.attach_volume(\n self.apiclient,\n data_disk_1\n )\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient, vm, self.host)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)\n vm = self.helper.migrateVm(self.apiclient, self.virtual_machine_live_migration_1, destinationHost)\n\n\n self.virtual_machine_live_migration_1.attach_volume(\n self.apiclient,\n self.volume\n )\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient,vm, self.host)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)\n vm = self.helper.migrateVm(self.apiclient, self.virtual_machine_live_migration_1, destinationHost)\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient,vm, self.host)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)",
"def attach(self, storages):\n self.tracer.info(\"%s.attach method called\" % self.__class__.__name__)\n\n # reload global.ini\n self._cfg.reload()\n\n # connect to Google API\n conn = self.api_conn()\n\n # fetch the GCE zone for this host\n zone = self.get_zone(conn, HOSTNAME)\n\n for storage in storages:\n # fetch pd & dev variables from global.ini for specified partition & usage\n connectionData = self._getConnectionDataForLun(storage.get(\"partition\"), storage.get(\"usage_type\"))\n try:\n pd = connectionData[\"pd\"]\n dev = connectionData[\"dev\"]\n except:\n raise Exception(\"pd or dev not set in global.ini\")\n\n # fetch mount options from global.ini\n try:\n mount_options = connectionData[\"mountoptions\"]\n except:\n mount_options = \"\"\n\n # fetch fencing options from global.ini\n try:\n fencing = connectionData[\"fencing\"]\n except:\n fencing = \"\"\n\n # fetch the host which currently owns the disk & the file path\n pdhost = self.get_pd_host(conn, pd, zone)\n path = storage.get(\"path\")\n\n # check if the require disk is already attached somewhere. If it is, detach it and fence the old host\n if pdhost == HOSTNAME:\n self.tracer.info(\"disk %s is already attached to %s(%s)\" % (pd, HOSTNAME, zone))\n self.mount(dev, path, mount_options)\n continue\n elif pdhost != \"\":\n self.tracer.info(\"unable to attach %s to %s(%s) as it is still attached to %s\" % (pd, HOSTNAME, zone, pdhost))\n self.detach_pd(conn, pdhost, pd)\n if fencing.lower() == \"enabled\" or fencing.lower() == \"true\" or fencing.lower() == \"yes\":\n self.fence(conn, pdhost)\n\n # prepare payload for API call\n pdurl = self.zonal_url(zone, \"disks\", pd)\n body = {\n \"deviceName\": pd,\n \"source\": pdurl\n }\n\n # send API call to disconnect disks\n self.tracer.info(\"attempting to attach %s to %s(%s)\" % (pd, HOSTNAME, zone))\n operation = conn.instances().attachDisk(project=PROJECT, zone=zone, instance=HOSTNAME, body=body).execute()\n self.wait_for_operation(conn, operation, zone)\n\n # check if disk is attached and if so, mount the volumes\n if self.get_pd_host(conn, pd, zone) == HOSTNAME:\n self.tracer.info(\"successfully attached %s to %s(%s)\" % (pd, HOSTNAME, zone))\n self.mount(dev, path, mount_options)\n else:\n raise Exception(\"failed to attached %s to %s(%s)\" % (pd, HOSTNAME, zone))\n\n # tell HANA is all good and to continue the load process\n return 0",
"def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,\n bootimg_dir, kernel_dir, native_sysroot):\n logger.debug(\"SourcePlugin: do_install_disk: disk: %s\", disk_name)",
"def disk_detach(vmdk_path, vm):\n\n device = findDeviceByPath(vmdk_path, vm)\n\n if not device:\n # Could happen if the disk attached to a different VM - attach fails\n # and docker will insist to sending \"unmount/detach\" which also fails.\n msg = \"*** Detach failed: disk={0} not found. VM={1}\".format(\n vmdk_path, vm.config.uuid)\n logging.warning(msg)\n return err(msg)\n\n spec = vim.vm.ConfigSpec()\n dev_changes = []\n\n disk_spec = vim.vm.device.VirtualDeviceSpec()\n disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove\n disk_spec.device = device\n dev_changes.append(disk_spec)\n spec.deviceChange = dev_changes\n\n try:\n wait_for_tasks(si, [vm.ReconfigVM_Task(spec=spec)])\n except vim.fault.GenericVmConfigFault as ex:\n for f in ex.faultMessage:\n logging.warning(f.message)\n return err(\"Failed to detach \" + vmdk_path)\n\n setStatusDetached(vmdk_path)\n logging.info(\"Disk detached %s\", vmdk_path)\n return None",
"def detach_disk_from_vm(self, vm_ref, instance_name, device):\n client_factory = self._session._get_vim().client.factory\n vmdk_detach_config_spec = vm_util.get_vmdk_detach_config_spec(\n client_factory, device)\n disk_key = device.key\n LOG.debug(_(\"Reconfiguring VM instance %(instance_name)s to detach \"\n \"disk %(disk_key)s\") % locals())\n reconfig_task = self._session._call_method(\n self._session._get_vim(),\n \"ReconfigVM_Task\", vm_ref,\n spec=vmdk_detach_config_spec)\n self._session._wait_for_task(instance_name, reconfig_task)\n LOG.debug(_(\"Reconfigured VM instance %(instance_name)s to detach \"\n \"disk %(disk_key)s\") % locals())",
"def setDisk(self, disk):\n self.__disk = disk",
"def attach(self, node, device=None):\r\n\r\n return self.driver.attach_volume(node=node, volume=self, device=device)",
"def _flash_dev(disk: pathlib.Path, image_path: pathlib.Path) -> None:\n shutil.copy(image_path, disk, follow_symlinks=False)\n if not platform.system() == \"Windows\":\n os.sync()",
"def connect_disk(self, instance, disk_info, stg_ftsk=None):\n raise NotImplementedError()",
"def attach_volume(self, context, connection_info, instance, mountpoint,\n disk_bus=None, device_type=None, encryption=None):\n data = connection_info['data']\n vm = self._get_instance(instance.uuid)\n data_disks = vm.storage_profile.data_disks\n luns = [i.lun for i in data_disks]\n new_lun = 1\n # azure allow upto 16 extra datadisk, 1 os disk + 1 ephemeral disk\n # ephemeral disk will always be sdb for linux.\n for i in range(1, 16):\n if i not in luns:\n new_lun = i\n break\n else:\n msg = 'Can not attach volume, exist volume amount upto 16.'\n LOG.error(msg)\n raise nova_ex.NovaException(msg)\n disk = self.disks.get(CONF.azure.resource_group, data['disk_name'])\n managed_disk = dict(id=disk.id)\n data_disk = dict(lun=new_lun,\n name=data['disk_name'],\n managed_disk=managed_disk,\n create_option='attach')\n data_disks.append(data_disk)\n self._create_update_instance(instance, vm)\n LOG.info(_LI(\"Attach Volume to Instance in Azure finish\"),\n instance=instance)",
"def attach_volume(self, host_path: str, container_path: str, mode: str = None):\n self.volumes[host_path] = {\n \"bind\": container_path,\n \"mode\": mode or \"Z\"\n }",
"def reconfigure_nova_ephemeral_disk(self):\n self.check_run('reconfigure_nova_ephemeral_disk')\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"reconfigure_overcommit_ratio\")\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n computes = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['compute'])\n\n self.show_step(2)\n existing_configs = self.fuel_web.client.list_configuration(\n cluster_id)\n for existing_config in existing_configs:\n self.fuel_web.client.delete_configuration(existing_config[\"id\"])\n\n self.show_step(3)\n config = utils.get_config_template('nova_disk')\n structured_config = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(config,\n cluster_id,\n role='compute')\n\n service_name = \"nova-compute\"\n\n uptimes = self.get_service_uptime(computes, service_name)\n\n self.show_step(4)\n task = self.fuel_web.client.apply_configuration(cluster_id,\n role='compute')\n self.show_step(5)\n self.fuel_web.assert_task_success(task, timeout=900, interval=5)\n\n self.show_step(6)\n self.check_service_was_restarted(computes, uptimes, service_name)\n\n self.show_step(7)\n self.check_config_on_remote(computes, structured_config)\n\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n\n self.show_step(8)\n self.show_step(9)\n self.show_step(10)\n self.show_step(11)\n self.show_step(12)\n self.check_nova_ephemeral_disk(os_conn, cluster_id)\n\n self.env.make_snapshot(\"reconfigure_nova_ephemeral_disk\",\n is_make=True)",
"def _setDisk(self, disk):\n log_method_call(self, self.name, old=getattr(self.disk, \"name\", None),\n new=getattr(disk, \"name\", None))\n self.parents = []\n if disk:\n self.parents.append(disk)",
"def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,\n bootimg_dir, kernel_dir, native_sysroot):\n if not cls.__imgBiosObj:\n cls.__instanciateBIOSClass()\n\n cls.__imgBiosObj.do_install_disk(disk, disk_name, creator, workdir,\n oe_builddir, bootimg_dir, kernel_dir,\n native_sysroot)",
"def attach_volume(self, connection_info, instance, mountpoint):\n instance_name = instance['name']\n vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)\n if vm_ref is None:\n raise exception.InstanceNotFound(instance_id=instance_name)\n # Attach Volume to VM\n LOG.debug(_(\"Attach_volume: %(connection_info)s, %(instance_name)s, \"\n \"%(mountpoint)s\") % locals())\n driver_type = connection_info['driver_volume_type']\n if driver_type not in ['iscsi']:\n raise exception.VolumeDriverNotFound(driver_type=driver_type)\n data = connection_info['data']\n mount_unit = volume_util.mountpoint_to_number(mountpoint)\n\n # Discover iSCSI Target\n device_name, uuid = self.discover_st(data)\n if device_name is None:\n raise volume_util.StorageError(_(\"Unable to find iSCSI Target\"))\n\n # Get the vmdk file name that the VM is pointing to\n hardware_devices = self._session._call_method(vim_util,\n \"get_dynamic_property\", vm_ref,\n \"VirtualMachine\", \"config.hardware.device\")\n vmdk_file_path, controller_key, adapter_type, disk_type, unit_number \\\n = vm_util.get_vmdk_path_and_adapter_type(hardware_devices)\n # Figure out the correct unit number\n if unit_number < mount_unit:\n unit_number = mount_unit\n else:\n unit_number = unit_number + 1\n self.attach_disk_to_vm(vm_ref, instance_name,\n adapter_type, disk_type=\"rdmp\",\n controller_key=controller_key,\n unit_number=unit_number,\n device_name=device_name)\n LOG.info(_(\"Mountpoint %(mountpoint)s attached to \"\n \"instance %(instance_name)s\") % locals())",
"def createVM(self ,disk ,name):\n return"
] | [
"0.73531574",
"0.68157685",
"0.67563796",
"0.66293067",
"0.64325035",
"0.6426697",
"0.63882655",
"0.6334873",
"0.623497",
"0.6223832",
"0.62092334",
"0.6202001",
"0.6195019",
"0.61449945",
"0.6086266",
"0.6073298",
"0.6072383",
"0.6067116",
"0.6057906",
"0.59617317",
"0.58979154",
"0.5896533",
"0.5861778",
"0.58505684",
"0.58503675",
"0.5837277",
"0.58187383",
"0.5788604",
"0.5738113",
"0.57315546"
] | 0.7390084 | 0 |
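The attach_disk_to_vm document drives ReconfigVM_Task through Nova's session wrapper, so it is not runnable on its own. A condensed pyVmomi sketch of the bare reconfigure-to-attach step, reusing only the API surface already shown in the add_disk negative above and assuming `vm` is a connected vim.VirtualMachine:

    from pyVim.task import WaitForTask
    from pyVmomi import vim

    def attach_thin_disk(vm, size_gb, controller_key, unit_number):
        # Describe one new thin-provisioned disk as a device change.
        disk_spec = vim.vm.device.VirtualDeviceSpec()
        disk_spec.fileOperation = "create"
        disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
        disk_spec.device = vim.vm.device.VirtualDisk()
        disk_spec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
        disk_spec.device.backing.thinProvisioned = True
        disk_spec.device.backing.diskMode = "persistent"
        disk_spec.device.unitNumber = unit_number
        disk_spec.device.capacityInKB = size_gb * 1024 * 1024
        disk_spec.device.controllerKey = controller_key

        # Reconfigure the VM and block until vSphere finishes the task.
        spec = vim.vm.ConfigSpec()
        spec.deviceChange = [disk_spec]
        WaitForTask(vm.ReconfigVM_Task(spec=spec))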
Detach disk from VM by reconfiguration. | def detach_disk_from_vm(self, vm_ref, instance_name, device):
client_factory = self._session._get_vim().client.factory
vmdk_detach_config_spec = vm_util.get_vmdk_detach_config_spec(
client_factory, device)
disk_key = device.key
LOG.debug(_("Reconfiguring VM instance %(instance_name)s to detach "
"disk %(disk_key)s") % locals())
reconfig_task = self._session._call_method(
self._session._get_vim(),
"ReconfigVM_Task", vm_ref,
spec=vmdk_detach_config_spec)
self._session._wait_for_task(instance_name, reconfig_task)
LOG.debug(_("Reconfigured VM instance %(instance_name)s to detach "
"disk %(disk_key)s") % locals()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def disk_detach(vmdk_path, vm):\n\n device = findDeviceByPath(vmdk_path, vm)\n\n if not device:\n # Could happen if the disk attached to a different VM - attach fails\n # and docker will insist to sending \"unmount/detach\" which also fails.\n msg = \"*** Detach failed: disk={0} not found. VM={1}\".format(\n vmdk_path, vm.config.uuid)\n logging.warning(msg)\n return err(msg)\n\n spec = vim.vm.ConfigSpec()\n dev_changes = []\n\n disk_spec = vim.vm.device.VirtualDeviceSpec()\n disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove\n disk_spec.device = device\n dev_changes.append(disk_spec)\n spec.deviceChange = dev_changes\n\n try:\n wait_for_tasks(si, [vm.ReconfigVM_Task(spec=spec)])\n except vim.fault.GenericVmConfigFault as ex:\n for f in ex.faultMessage:\n logging.warning(f.message)\n return err(\"Failed to detach \" + vmdk_path)\n\n setStatusDetached(vmdk_path)\n logging.info(\"Disk detached %s\", vmdk_path)\n return None",
"def detachDiskFromMinipad(self , disk):\n return",
"def _DetachDisk(self, idx, root, _):\n hotmsg = \"\"\n if self.op.hotplug:\n hotmsg = self._HotplugDevice(constants.HOTPLUG_ACTION_REMOVE,\n constants.HOTPLUG_TARGET_DISK,\n root, None, idx)\n\n # Always shutdown the disk before detaching.\n ShutdownInstanceDisks(self, self.instance, [root])\n\n # Rename detached disk.\n #\n # Transform logical_id from:\n # <file_storage_dir>/<instance_name>/<disk_name>\n # to\n # <file_storage_dir>/<disk_name>\n if root.dev_type in (constants.DT_FILE, constants.DT_SHARED_FILE):\n file_driver = root.logical_id[0]\n instance_path, disk_name = os.path.split(root.logical_id[1])\n new_path = os.path.join(os.path.dirname(instance_path), disk_name)\n new_logical_id = (file_driver, new_path)\n result = self.rpc.call_blockdev_rename(self.instance.primary_node,\n [(root, new_logical_id)])\n result.Raise(\"Failed before detach\")\n # Update logical_id\n self.cfg.SetDiskLogicalID(root.uuid, new_logical_id)\n\n # Remove disk from config\n self.cfg.DetachInstanceDisk(self.instance.uuid, root.uuid)\n\n # re-read the instance from the configuration\n self.instance = self.cfg.GetInstanceInfo(self.instance.uuid)\n\n return hotmsg",
"def detachDisk(positive, alias, vmName):\n logger.info(\"Detaching disk %s from vm %s\", alias, vmName)\n disk_attachment = get_disk_attachment(vmName, alias, attr='name')\n return DISK_ATTACHMENTS_API.delete(disk_attachment, positive)",
"def detach_volume(self, host_path: str):\n del self.volumes[host_path]",
"def detach_volume(self, connection_info, instance, mountpoint,\n encryption=None):",
"def detach_pd(self, conn, host, pd):\n zone = self.get_zone(conn, host)\n pdhost = self.get_pd_host(conn, pd, zone)\n if pdhost == \"\":\n self.tracer.info(\n \"disk %s is already attached to %s(%s)\" % (pd, host, zone))\n elif pdhost == host:\n self.tracer.info(\"attempting to detach %s from %s(%s)\" % (pd, host, zone))\n operation = conn.instances().detachDisk(project=PROJECT, zone=zone, instance=host, deviceName=pd).execute()\n self.wait_for_operation(conn, operation, zone)\n if self.get_pd_host(conn, pd, zone) == \"\":\n self.tracer.info(\"successfully detached %s from %s(%s)\" % (pd, host, zone))",
"def disconnect_disk_from_mgmt(self, vios_uuid, disk_name):\n raise NotImplementedError()",
"def disconnect_disk(self, instance, stg_ftsk=None, disk_type=None):\n raise NotImplementedError()",
"def detach(self):\r\n\r\n return self.driver.detach_volume(volume=self)",
"def detach_volume(self, connection_info, instance, mountpoint,\n encryption=None):\n vhd_name = connection_info['data']['disk_name']\n vm = self._get_instance(instance.uuid)\n data_disks = vm.storage_profile.data_disks\n not_found = True\n for i in range(len(data_disks)):\n if vhd_name == data_disks[i].name:\n del data_disks[i]\n not_found = False\n break\n if not_found:\n LOG.info(_LI('Volume: %s was not attached to Instance!'),\n vhd_name, instance=instance)\n return\n self._create_update_instance(instance, vm)\n LOG.info(_LI(\"Detach Volume to Instance in Azure finish\"),\n instance=instance)",
"def detach(self, name):\n volume_info = self.cm.find_name(name)\n if volume_info and volume_info[0]['State'] != \"deleted\":\n vms = volume_info[0]['AttachedToVm']\n path = volume_info[0]['path']\n if len(vms) == 0:\n Console.error(f\"{name} is not attached to any vm\")\n else:\n removed = []\n for vm in vms:\n result = self.unmount(path=f\"{path}/{name}\", vm=vm)\n mounts = result['mounts']\n if f\"{path}/{name}\" not in mounts.keys():\n removed.append(vm)\n for vm in removed:\n vms.remove(vm)\n result = self.update_volume_after_detach(volume_info, vms)\n return result[0]\n else:\n Console.error(\"volume does not exist or volume had been deleted\")",
"def detach(self):\n raise io.UnsupportedOperation",
"def detach_volume(self, instance_name, mountpoint):\n return True",
"def detach_volume(self, context, volume_id):\n # TODO(vish): refactor this into a more general \"unreserve\"\n # TODO(sleepsonthefloor): Is this 'elevated' appropriate?\n # self.db.volume_detached(context.elevated(), volume_id)\n self.db.volume_admin_metadata_delete(context.elevated(), volume_id,\n 'attached_mode')",
"def detach_volume(self):\n\n # Choose the volume\n volume_id = self._choose_among_used_volumes()\n\n # Cancel\n if not volume_id:\n print 'Operation cancelled'\n return\n\n # Detach the volume\n print '# Detaching volume \"%s\"!' % volume_id\n if self.compute.detach_volume(volume_id):\n print 'The volume has been detached!'\n else:\n print 'The volume could not been detached'",
"def delete(vmname, deldisk=True):\n\n dom = _conn.lookupByName(vmname)\n if dom.isActive():\n dom.destroy()\n infokeeper.update_status_vm(vmname, Instance.STATUS_POWER_OFF)\n dom.undefine()\n infokeeper.delete_vm(vmname)\n if deldisk:\n os.remove(os.path.join(base_disk_path, dom.name() + '.img'))\n return 'VM %s deleted' % vmname",
"def detach(self, force=False):\r\n instance_id = None\r\n if self.attach_data:\r\n instance_id = self.attach_data.instance_id\r\n device = None\r\n if self.attach_data:\r\n device = self.attach_data.device\r\n return self.connection.detach_volume(self.id, instance_id, device, force)",
"def detach(self, storages):\n self.tracer.info(\"%s.attach method called\" % self.__class__.__name__)\n\n # init variables & arrays\n all_pds = []\n all_vgs = []\n unmount_err = 0\n\n # reload global.ini\n self._cfg.reload()\n\n # connect to Google API\n conn = self.api_conn()\n\n # fetch the GCE zone for this host\n zone = self.get_zone(conn, HOSTNAME)\n\n for storage in storages:\n # fetch pd & dev variables for specified partition & usage\n connectionData = self._getConnectionDataForLun(storage.get(\"partition\"), storage.get(\"usage_type\"))\n try:\n pd = connectionData[\"pd\"]\n dev = connectionData[\"dev\"]\n except:\n raise Exception(\"pd or dev not set in global.ini\")\n\n # fetch the host which currently owns the disk & the file path\n path = storage.get(\"path\")\n\n # try to unmount the file system twice\n self._forcedUnmount(dev, path, 2)\n\n # if it's still mounted, try killing blocking processes and umount again\n if os.path.ismount(path):\n self._lsof_and_kill(path)\n self._forcedUnmount(dev, path, 2)\n\n # if still mounted, raise exception. The taking over node will stonith this host\n if os.path.ismount(path):\n self.tracer.warning(\"A PID belonging to someone other than SIDADM is blocking the unmount. This node will be fenced\")\n self._umount(path, lazy=True)\n mount_err = 1\n\n # add to list of devices.\n all_pds.append(pd)\n\n # check to see if the device is a VG. If so, add it to the list of VG's\n all_vgs.append(self.get_vg(dev))\n\n # Stop each unique VG\n all_vgs = list(set(all_vgs))\n for vg in all_vgs:\n Helper._runOsCommand(\"sudo /sbin/vgchange -an %s\" % vg, self.tracer)\n self.tracer.info(\"stopping volume group %s\" % (vg))\n\n # for each unique disk detected, detach it using Google API's\n all_pds = list(set(all_pds))\n for pd_member in all_pds:\n self.detach_pd(conn, HOSTNAME, pd_member)\n\n # if there was an error unmounting, self fence\n if unmount_err == 1:\n self.fence(conn, pdhost)\n\n # tell HANA we successfully detached\n return 0",
"def delete_disk(self, disk, delete_vmdk=True):\n backend_disk = self.get_backend_disk(disk)\n\n try:\n self.client.delete_disk(disk.vm.backend_id, disk.backend_id)\n except VMwareError as e:\n raise VMwareBackendError(e)\n\n if delete_vmdk:\n vdm = self.soap_client.content.virtualDiskManager\n task = vdm.DeleteVirtualDisk(\n name=backend_disk.backing.fileName,\n datacenter=self.get_disk_datacenter(backend_disk),\n )\n try:\n pyVim.task.WaitForTask(task)\n except Exception:\n logger.exception('Unable to delete VMware disk. Disk ID: %s.', disk.id)\n raise VMwareBackendError('Unknown error.')\n signals.vm_updated.send(self.__class__, vm=disk.vm)",
"def detach_volume(self, connection_info, instance, mountpoint):\n instance_name = instance['name']\n vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)\n if vm_ref is None:\n raise exception.InstanceNotFound(instance_id=instance_name)\n # Detach Volume from VM\n LOG.debug(_(\"Detach_volume: %(instance_name)s, %(mountpoint)s\")\n % locals())\n driver_type = connection_info['driver_volume_type']\n if driver_type not in ['iscsi']:\n raise exception.VolumeDriverNotFound(driver_type=driver_type)\n data = connection_info['data']\n\n # Discover iSCSI Target\n device_name, uuid = volume_util.find_st(self._session, data,\n self._cluster)\n if device_name is None:\n raise volume_util.StorageError(_(\"Unable to find iSCSI Target\"))\n\n # Get the vmdk file name that the VM is pointing to\n hardware_devices = self._session._call_method(vim_util,\n \"get_dynamic_property\", vm_ref,\n \"VirtualMachine\", \"config.hardware.device\")\n device = vm_util.get_rdm_disk(hardware_devices, uuid)\n if device is None:\n raise volume_util.StorageError(_(\"Unable to find volume\"))\n self.detach_disk_from_vm(vm_ref, instance_name, device)\n LOG.info(_(\"Mountpoint %(mountpoint)s detached from \"\n \"instance %(instance_name)s\") % locals())",
"def _detach_volume(self, server, volume):\n try:\n volume = self.volumes_client.show_volume(volume['id'])['volume']\n # Check the status. You can only detach an in-use volume, otherwise\n # the compute API will return a 400 response.\n if volume['status'] == 'in-use':\n self.servers_client.detach_volume(server['id'], volume['id'])\n except lib_exc.NotFound:\n # Ignore 404s on detach in case the server is deleted or the volume\n # is already detached.\n pass",
"def _detach_volume(self, server, volume):\n try:\n volume = self.volumes_client.show_volume(volume['id'])['volume']\n # Check the status. You can only detach an in-use volume, otherwise\n # the compute API will return a 400 response.\n if volume['status'] == 'in-use':\n self.servers_client.detach_volume(server['id'], volume['id'])\n except lib_exc.NotFound:\n # Ignore 404s on detach in case the server is deleted or the volume\n # is already detached.\n pass",
"def vm_diskdelete(args):\n name = args.name\n diskname = args.diskname\n pool = args.pool\n config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)\n k = config.k\n if diskname is None:\n common.pprint(\"Missing diskname. Leaving...\", color='red')\n os._exit(1)\n common.pprint(\"Deleting disk %s\" % diskname)\n k.delete_disk(name=name, diskname=diskname, pool=pool)\n return",
"async def eject(self) -> None:\n await self.dbus.Drive.call_eject(UDISKS2_DEFAULT_OPTIONS)",
"def down():\n\n # Stop the program if no init has occurred.\n Vagrant.stop_if_not_init()\n\n # Run vagrant halt from the vagrant folder.\n command = [\"vagrant\", \"halt\"]\n cwd = Settings.devbox_folder\n try:\n result = subprocess.check_call(command, cwd=cwd)\n except subprocess.CalledProcessError:\n Utilities.log(\"Could not run 'vagrant halt'.\")\n exit(1)",
"def detach(target, sysip):\n click.secho(\"Attempting to detach template.\")\n\n payload = {\n \"deviceType\":\"vedge\",\n \"devices\":[ \n {\n \"deviceId\":str(target),\n \"deviceIP\":str(sysip)\n }\n ]\n }\n\n url = base_url + \"/template/config/device/mode/cli\"\n\n response = requests.post(url=url, data=json.dumps(payload), headers=header, verify=False)\n if response.status_code == 200:\n id = response.json()[\"id\"]\n url = base_url + \"/device/action/status/\" + str(id)\n while(1):\n status_res = requests.get(url,headers=header,verify=False)\n if status_res.status_code == 200:\n push_status = status_res.json()\n if push_status['summary']['status'] == \"done\":\n if 'Success' in push_status['summary']['count']:\n print(\"Changed configuration mode to CLI\")\n elif 'Failure' in push_status['summary']['count']:\n print(\"Failed to change configuration mode to CLI\")\n exit()\n break\n else:\n print(\"Failed to detach template with error \" + response.text)\n exit()",
"def detach_volume(self,\n connection_info,\n instance,\n mountpoint,\n encryption=None):\n volume_data = connection_info['data']\n azure_name = self._get_omni_name_from_instance(instance)\n azure_instance = utils.get_instance(\n self.compute_client, drv_conf.resource_group, azure_name)\n data_disks = azure_instance.storage_profile.data_disks\n name = volume_data['name']\n filtered_disks = [disk for disk in data_disks if disk.name != name]\n if len(filtered_disks) == len(data_disks):\n LOG.error(\"Volume %s was not attached to instance %s\" %\n (name, instance.uuid))\n return\n azure_instance.storage_profile.data_disks = filtered_disks\n utils.create_or_update_instance(self.compute_client,\n drv_conf.resource_group, azure_name,\n azure_instance)\n LOG.info(\"Detached volume %s from instance %s\" % (name, instance.uuid))",
"def _wipe(self):\n log_method_call(self, self.name, status=self.status)\n\n start = self.partedPartition.geometry.start\n part_len = self.partedPartition.geometry.end - start\n bs = self.partedPartition.geometry.device.sectorSize\n device = self.partedPartition.geometry.device.path\n\n # Erase 1MiB or to end of partition\n count = int(Size(\"1 MiB\") / bs)\n count = min(count, part_len)\n\n cmd = [\"dd\", \"if=/dev/zero\", \"of=%s\" % device, \"bs=%s\" % bs,\n \"seek=%s\" % start, \"count=%s\" % count]\n try:\n util.run_program(cmd)\n except OSError as e:\n log.error(str(e))\n finally:\n # If a udev device is created with the watch option, then\n # a change uevent is synthesized and we need to wait for\n # things to settle.\n udev.settle()",
"def _unprovision_node(self, conn):\n conn.run(f\"rm -rf {EXPORTER_HOME}\")"
] | [
"0.765152",
"0.71042055",
"0.710292",
"0.7051662",
"0.6706635",
"0.66381764",
"0.65310085",
"0.63526535",
"0.62885463",
"0.62843525",
"0.6217464",
"0.62058824",
"0.6176701",
"0.61576825",
"0.6140649",
"0.6140027",
"0.6105745",
"0.6038374",
"0.6036814",
"0.5985441",
"0.58947974",
"0.5870777",
"0.5870777",
"0.5830202",
"0.5812258",
"0.57748884",
"0.57565343",
"0.5755057",
"0.57426465",
"0.5661209"
] | 0.74518675 | 1 |
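The negatives above all orbit the same detach-volume flow: verify the volume's state, unmount or unmap it from the VM, then update the attachment bookkeeping. A minimal neutral sketch of that shared pattern follows; every name in it is hypothetical rather than taken from any one snippet.

```python
def detach_volume(volume, vm, unmount):
    # Hypothetical distillation of the detach pattern shared by the snippets
    # above: refuse deleted volumes, unmount from the VM, drop the record.
    if volume.get("State") == "deleted":
        raise ValueError("volume has been deleted")
    unmount(volume["path"], vm)
    volume["AttachedToVm"] = [v for v in volume["AttachedToVm"] if v != vm]
    return volume

# Example with a no-op unmount callback:
vol = {"State": "in-use", "path": "/dev/vols/v1", "AttachedToVm": ["vm-1"]}
print(detach_volume(vol, "vm-1", lambda path, vm: None)["AttachedToVm"])  # []
```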
Return volume connector information. | def get_volume_connector(self, instance):
iqn = volume_util.get_host_iqn(self._session, self._cluster)
return {
'ip': CONF.vmwareapi_host_ip,
'initiator': iqn,
'host': CONF.vmwareapi_host_ip
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize_connection(self, volume, connector):\n export = '%s/%s' % (volume['provider_location'], volume['name'])\n data = {'export': export, 'name': 'volume'}\n if volume['provider_location'] in self.shares:\n data['options'] = self.shares[volume['provider_location']]\n return {\n 'driver_volume_type': self.driver_volume_type,\n 'data': data\n }",
"def get_volume_connector(self, instance):\n props = {}\n # 'get_volume_connector' will be invoked during creation\n # of the partition and during deletion of the partition.\n # But 'wwpns' we can access only when partition is available.\n # During spawn flow 'get_volume_connector' function will be called\n # before 'spawn' function so to get 'wwpns' we first creating\n # the partition using 'prep_for_spawn' function so that\n # we can access 'wwpns'.(i.e - else part)\n # But during deletion 'get_volume_connector' will be called\n # after 'destroy' function which will delete the partition so\n # after that we can not get the 'wwpns'\n # In order to get 'wwpns' after 'destroy' function we are\n # saving 'wwpns' before deleting partition in 'destroy' function\n # in 'deleted_instance_wwpns_mapping' variable and using these 'wwpns'\n # in 'get_volume_connector'(i.e - if part)\n # after using these 'wwpns' we are removing these 'wwpns' from\n # 'deleted_instance_wwpns_mapping' variable because\n # we are not going to use these 'wwpns' any more after this.\n if instance.uuid in self.deleted_instance_wwpns_mapping:\n props['wwpns'] = self.deleted_instance_wwpns_mapping.pop(\n instance.uuid)\n else:\n inst = vm.PartitionInstance(instance, self._cpc)\n props['wwpns'] = inst.get_partition_wwpns()\n\n props['host'] = instance.uuid\n\n return props",
"def initialize_connection(\n self, volume, connector, is_snapshot=False, lun=None,\n is_mirror=False):\n conn_info, map_info = super(HBSDRESTISCSI, self).initialize_connection(\n volume, connector, is_snapshot, lun)\n return conn_info",
"def _get_data(self):\n c = Connector(self.host, self.username, self.password)\n return c.getLanDevices()",
"def initialize_connection(self, volume, connector):\n # Non-shared connections was the original implementation where all the\n # export & mapping was done on export and the connection info was\n # stored in the volume, so let the original implementation handle it.\n if not self.share_targets:\n return super().initialize_connection(volume, connector)\n\n # For the shared case the export only stores the path of the volume\n volume_path = volume.provider_location\n if not os.path.exists(volume_path):\n raise exception.InvalidConfigurationValue(\n 'Target driver configured with shared targets, but volume '\n 'exported as non shared.')\n\n nqn, ns_id = self._map_volume(volume, volume_path, connector)\n uuid = self._get_nvme_uuid(volume)\n return {\n 'driver_volume_type': self.protocol,\n 'data': self._get_connection_properties(nqn,\n self.target_ips,\n self.target_port,\n self.nvme_transport_type,\n ns_id, uuid),\n }",
"def volume():\n vol = sonos.volume\n return vol",
"def Vc(self):\n return self.__central_volume",
"def connect_volume(self, connection_properties):\n\n sheepdog_handle = self._get_sheepdog_handle(connection_properties)\n return {'path': sheepdog_handle}",
"def volume(self):\n return {'lvad': self._v}",
"def connector(self):\n if '_connector' not in self.__dict__:\n from meerschaum.connectors.parse import parse_instance_keys\n conn = parse_instance_keys(self.connector_keys)\n if conn:\n self._connector = conn\n else:\n return None\n return self._connector",
"def get_volume(self):\n return self.__volume",
"def get_connector_properties(root_helper, *args, **kwargs):\n return {}",
"def ConnectionInfo(self):\n if (self._accountKind == \"azure\"):\n print(\"%s: %s\" % (DataConnection.accountName, self._accountName))\n print(\"%s: %s\" % (DataConnection.accountKind, self._accountKind))\n else:\n raise NotImplementedError(DataConnection.notYetImplementedMsg)",
"def device_info(self):\n info = {\n \"connections\": {(CONNECTION_NETWORK_MAC, self._data[\"port-mac-address\"])},\n \"manufacturer\": self._ctrl.data[\"resource\"][\"platform\"],\n \"model\": self._ctrl.data[\"resource\"][\"board-name\"],\n \"name\": f\"{self._inst} {self._data['default-name']}\",\n }\n return info",
"def volume(self):\n return self.structure.volume",
"def get_volume_info(host, disk_object, dc_obj):\n host_resource = get_host_resource_by_name(host)\n\n vol_id = disk_object.get_image_id()\n sd_id = disk_object.get_storage_domains().get_storage_domain()[0].get_id()\n image_id = disk_object.get_id()\n sp_id = dc_obj.get_id()\n\n args = {\n \"storagepoolID\": sp_id,\n \"storagedomainID\": sd_id,\n \"imageID\": image_id,\n \"volumeID\": vol_id,\n }\n\n return host_resource.vds_client(cmd=\"Volume.getInfo\", args=args)",
"def getVolume(self):\n return _libsbml.Compartment_getVolume(self)",
"def volume(self):\n return self._volume()",
"def volume(self):\n return self._volume()",
"def getVolume(self):\n return self.__volume",
"def get_volume(self):\n return str(round(self._call_player_proxy('VolumeGet', None).unpack()[0]))",
"def initialize_connection(self, volume, connector):\n # create client\n initiator_iqn = connector['initiator']\n self.create_client(initiator_iqn)\n auth = self._get_auth_for_client(initiator_iqn)\n username = initiator_iqn\n if not auth['password']:\n password = volume_utils.generate_password(length=self.CHAP_LENGTH)\n self._set_chap_for_client(initiator_iqn, username, password)\n else:\n LOG.debug(\"using existing CHAP password\")\n password = auth['password']\n\n # add disk for export\n iscsi_config = self._get_config()\n\n # First have to ensure that the disk is registered with\n # the gateways.\n self.create_disk(volume.name)\n self.register_disk(self.target_iqn, volume.name)\n\n iscsi_config = self._get_config()\n # Now export the disk to the initiator\n lun = self.export_disk(initiator_iqn, volume.name, iscsi_config)\n\n # fetch the updated config so we can get the lun id\n iscsi_config = self._get_config()\n target_info = iscsi_config['targets'][self.target_iqn]\n ips = target_info['ip_list']\n\n target_portal = ips[0]\n if netutils.is_valid_ipv6(target_portal):\n target_portal = \"[%s]:3260\" % target_portal\n else:\n target_portal = \"%s:3260\" % target_portal\n\n data = {\n 'driver_volume_type': 'iscsi',\n 'data': {\n 'target_iqn': self.target_iqn,\n 'target_portal': target_portal,\n 'target_lun': lun['id'],\n 'auth_method': 'CHAP',\n 'auth_username': username,\n 'auth_password': password,\n }\n }\n return data",
"def get_device_information(self):\n return self.mycam.devicemgmt.GetDeviceInformation()",
"def get_volume_info(volumes):\n if type(volumes) is not list:\n volumes = [volumes]\n volume_info_list = []\n for volume in volumes:\n command = 'cinder show %s' % volume['id']\n volume_info = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0])\n att = volume_info['attachments'].replace(\"'\", \"\\\"\").replace(\n \"u\\\"\", \"\\\"\").replace(\" None,\", \" \\\"None\\\",\")\n volume_info['device'] = json.loads(att)[0]['device']\n volume_info_list.append(volume_info)\n return volume_info_list",
"def device_info(self):\n return {\n \"name\": self._alias,\n \"model\": self._model,\n \"manufacturer\": \"TP-Link\",\n \"connections\": {(dr.CONNECTION_NETWORK_MAC, self._mac)},\n \"sw_version\": self._sysinfo[\"sw_ver\"],\n }",
"def extract_volume(self):\n\n # RDD or array of [(partition, vol)]\n vols = None\n if self.usespark:\n vols = self._retrieve_vol(self.current_spot, None)\n else:\n vols = self._retrieve_vol(self.current_spot, len(self.partitions))\n self.current_spot += len(self.partitions)\n \n return vols",
"def get_ceph_drv_info():\n disks_info = []\n stat = psutil.disk_io_counters(perdisk=True)\n for drv in get_ceph_disk():\n info = CEPHDiskInfo(drv)\n disk = basename(drv)\n if disk in stat:\n info.rd_cnt = stat[disk].read_count\n info.wr_cnt = stat[disk].write_count\n info.rd_bytes = stat[disk].read_bytes\n info.wr_bytes = stat[disk].write_bytes\n info.rd_time = stat[disk].read_time\n info.wr_time = stat[disk].write_time\n\n disks_info.append(info)\n\n return disks_info",
"def volumes(self):",
"def get_basic_volume_info(vol_name, vl=None):\n return_dict = None\n try:\n vl, err = get_basic_volume_info_all()\n for v in vl:\n if v['name'] == vol_name:\n return_dict = v\n break\n except Exception, e:\n return None, 'Error getting basic volume information for a specific volume : %s' % str(e)\n else:\n return return_dict, None",
"def _get_connector_type(self):\n\n raise NotImplementedError()"
] | [
"0.6858231",
"0.6418148",
"0.62130344",
"0.60607785",
"0.6042523",
"0.58188075",
"0.5792801",
"0.5697431",
"0.55846596",
"0.55579096",
"0.5506042",
"0.5469819",
"0.5426191",
"0.5425665",
"0.54218316",
"0.5421626",
"0.54012877",
"0.53762865",
"0.53762865",
"0.5360571",
"0.53501505",
"0.5332365",
"0.5330535",
"0.53274167",
"0.53186363",
"0.53176093",
"0.5315009",
"0.53113294",
"0.53073996",
"0.5284102"
] | 0.7179875 | 0 |
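A hedged sketch of how a caller might consume the connector dict that the positive document above returns; the IP and IQN literals are placeholders, not values from any real configuration.

```python
def describe_connector(connector):
    # The driver returns 'ip', 'initiator' (the host IQN) and 'host'.
    return "host {host} connects from {ip} as {initiator}".format(**connector)

connector = {
    "ip": "192.0.2.10",                              # CONF.vmwareapi_host_ip
    "initiator": "iqn.1998-01.com.vmware:esx-host",  # placeholder host IQN
    "host": "192.0.2.10",
}
print(describe_connector(connector))
```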
check that columns_lst is a subset of self.df.columns.names | def validate_col_lst(self, df, columns_lst):
if columns_lst == []:
raise ValueError("column_lst is empty")
col_set = set(columns_lst)
df_col_set = set(list(df))
if col_set - df_col_set != set():
        msg = "col_lst has column names that do not exist in the DataFrame columns: {}".format(
            str(col_set - df_col_set))
print(msg)
raise ValueError(msg)
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def verify_columns_in_dataframe(df, columns):\n\n if not isinstance(columns, list):\n columns = [columns]\n return set(columns).issubset(df.columns)",
"def _check_columns(df: pd.DataFrame, names: typing.Sequence[str]) -> None:\n for expected in names:\n if expected not in df.columns:\n raise ValueError(f\"'{expected}' column not found in input\")\n return",
"def check_columns_in_dataframe(df: pd.DataFrame, columns: Tuple[str]) -> None:\n\n for col in columns:\n if col not in df.columns:\n raise ValueError(f\"Column {col} is not in the dataframe.\")",
"def _validate_columns(self, names):\n if not is_list_like(names):\n raise ValueError(\"Columns should be list-like\")\n\n if len(set(names)) != len(names):\n raise ValueError(\"Duplicate column names\")\n\n if self._data and len(names) != len(self._data[0]):\n raise ValueError(\"Invalid columns length\")",
"def checkcolumnstest(chosen_columns, chosen_df):\n if not all([item in chosen_columns for item in chosen_df.columns]):\n raise ValueError('Columns do not match')",
"def get_needed_columns(df, list_of_columns):\n return df[list_of_columns]",
"def cols_valid(self,\n df: pd.DataFrame,\n req_cols: set) -> bool:\n missing_cols = req_cols.difference(df.columns)\n\n if len(missing_cols) > 0:\n logging.error(f\"{missing_cols} columns required but missing\")\n return False\n\n return True",
"def __checkcolumns(self, lista: List[str]) -> True:\r\n\r\n if isinstance(lista, list) is False:\r\n raise TypeError(f\"{lista} has to be a list.\")\r\n if len(lista) != 10:\r\n raise ValueError(f\"{lista} must have 10 columns\")\r\n\r\n errorlista = []\r\n\r\n # Regarding 'self.tiposDisponiveis',\r\n # Layer and Marked happens on the same column.\r\n # if there is 'layer', 'marked' won't show up, and viceversa.\r\n # Therefore 'self.tiposDisponiveis' is a list with 11 elements. While 'lista' is a list with 10 elements.\r\n\r\n for _ in lista:\r\n # searching for 'Layer'\r\n if self.tiposDisponiveis[0].lower() == _.lower():\r\n break\r\n else:\r\n # if 'Layer' wasn't found, searching for 'Marked'\r\n for _ in lista:\r\n if self.tiposDisponiveis[1].lower() == _.lower():\r\n break\r\n else:\r\n # If none of the two are present on the line, add to the error list\r\n errorlista.append(\"Layer Or Marked\")\r\n \r\n # repeat the search for all the remaining required values\"\"\"\r\n for _ in range(2, len(self.tiposDisponiveis)-1):\r\n for x in lista:\r\n if x.lower() == self.tiposDisponiveis[_].lower():\r\n break\r\n else:\r\n # Didn't find this column in the list\r\n errorlista.append(f\"{self.tiposDisponiveis[_]}\")\r\n\r\n # Raising the errors, if any occurred.\r\n if len(errorlista) > 0:\r\n raise ValueError(f\"{errorlista} <- These columns are missing from format.\")\r\n\r\n # Last column has to be 'Text'\r\n if lista[9].lower() != self.tiposDisponiveis[10].lower():\r\n raise ValueError(f\"{lista[9]} last element has to be 'Text'.\")\r\n \r\n return True",
"def _check_columns_with_table(table: Table, columns: Sequence[str]) -> Optional[bool]:\n for column in columns:\n if column not in table.c.keys():\n raise TypeError(f\"Specified column {column} did not exist on table {table}\")\n return True",
"def __column_intersect(df, list_):\n return set(list_).intersection(set(df.columns.tolist()))",
"def _dataframe_column_check(df: DataFrame, compulsory_columns: Sequence) -> None:\n if not set(compulsory_columns).issubset(df.columns):\n diff = set(compulsory_columns).difference(df.columns)\n msg = (\n \"The following compulsory column(s) are missing from the \"\n f\"DataFrame: {diff}\"\n )\n raise ValueError(msg)",
"def check_col(self):\n return (set(map(lambda x: x.lower(),\n self.config['dtypes'])) -\n set(self.metadata.name.values))",
"def _check_columns(\n schema_errors: set[str],\n stored: Mapping,\n expected: Mapping,\n columns: Iterable[str],\n table_name: str,\n supports: str,\n) -> None:\n for column in columns:\n if stored[column] == expected[column]:\n continue\n schema_errors.add(f\"{table_name}.{supports}\")\n _LOGGER.error(\n \"Column %s in database table %s does not support %s (stored=%s != expected=%s)\",\n column,\n table_name,\n supports,\n stored[column],\n expected[column],\n )",
"def _check_missing_columns(self, df: pd.DataFrame) -> None:\n if any([c not in df.columns for c in REQUIRED_COLUMNS]):\n raise ValueError(\"Missing columns in dataset.\"\n f\"Columns: {df.columns}\"\n f\"Required: {REQUIRED_COLUMNS}\")",
"def check_base_fields(df,base_fields):\n emp_list = []\n for item in base_fields:\n if item not in list(df.columns):\n emp_list.append(item)\n\n return emp_list",
"def _check_headers(cursor, headers):\n all_columns = set(chain.from_iterable(_columns(cursor, table) for table in DATA_TABLES))\n for header in headers:\n if header not in all_columns:\n raise ValueError('column {} not recognized'.format(header))",
"def has_columns(df, columns):\n result = True\n for column in columns:\n if column not in df.columns:\n print(\"Missing column: {} in DataFrame\".format(column))\n result = False\n\n return result",
"def get_columns_for(self, column_names, row, dbrow=None):\n if dbrow:\n candidates = filter(lambda col: col not in self.insert_only_fields, column_names)\n else:\n candidates = column_names\n\n return set(candidates).intersection(row._fields)",
"def _assert_columns_exist(self, columns):\n if not nonstringiter(columns):\n columns = (columns,)\n self_cols = self.columns()\n is_missing = lambda col: col not in self_cols\n missing = [c for c in columns if is_missing(c)]\n if missing:\n missing = ', '.join(repr(x) for x in missing)\n msg = '{0} not in {1}'.format(missing, self.__repr__())\n raise LookupError(msg)",
"def check_dataframe_columns(df):\r\n if len(set(df.columns).intersection(\r\n set([constants.CASE_CONCEPT_NAME, xes_constants.DEFAULT_NAME_KEY,\r\n xes_constants.DEFAULT_TIMESTAMP_KEY]))) < 3:\r\n raise Exception(\r\n \"please format your dataframe accordingly! df = pm4py.format_dataframe(df, case_id='<name of the case ID column>', activity_key='<name of the activity column>', timestamp_key='<name of the timestamp column>')\")",
"def verify_columns_in_dataset(self, columns):\n all_cols = self.train.columns\n for col in columns:\n if not col in all_cols:\n raise KeyError(\"column '%s' not in dataset\" % col)",
"def check_column(self, columns):\n for i in columns:\n if i.name == self.name:\n raise ColumnNameAlreadyInTableException(f'Column \"{self.name}\" is already in the table!')\n return True",
"def _validate_optional_columns(data, optional_columns: Iterable[str]) -> List[str]:\n return [col for col in optional_columns if col in data.columns]",
"def is_cols_valid(bd):\n for col in cols:\n seen = []\n for num in nums:\n if bd[col[num]] == \" \":\n continue\n elif bd[col[num]] not in seen:\n seen += [bd[col[num]]]\n else:\n return False\n else:\n continue\n return True",
"def check_ingress_required_columns(self, col_names):\n if not set(col_names).issuperset(REQUIRED_COLUMNS):\n if not set(col_names).issuperset(REQUIRED_ALT_COLUMNS):\n missing_columns = [x for x in REQUIRED_ALT_COLUMNS if x not in col_names]\n return missing_columns\n return None",
"def verify_columns_in_dataset(self, columns):\n all_cols = self.dataset.columns\n for col in columns:\n if not col in all_cols:\n raise KeyError(\"column '%s' not in dataset\" % col)",
"def mpl_args_to_meta_cols(df, **kwargs):\n cols = set()\n for arg, value in kwargs.items():\n if is_str(value) and value in df.meta.columns:\n cols.add(value)\n return list(cols)",
"def _selected_columns(self):\n selected_columns = set()\n for feature in self.features:\n columns = feature[0]\n if isinstance(columns, list):\n selected_columns = selected_columns.union(set(columns))\n else:\n selected_columns.add(columns)\n return selected_columns",
"def get_columns(hdu, columns):\n if columns is not None:\n columns = columns.split(',')\n columns = [c.lower() for c in columns]\n else:\n columns = hdu.get_colnames()\n\n return columns",
"def test_columns_str_error(self):\n\n df = d.create_df_1()\n\n x = BaseTransformer(columns=None)\n\n x.columns = \"a\"\n\n with pytest.raises(ValueError):\n\n x.columns_check(X=df)"
] | [
"0.7229939",
"0.69595104",
"0.6792466",
"0.6648041",
"0.66035503",
"0.65919405",
"0.65672946",
"0.6438893",
"0.64331186",
"0.6377156",
"0.6362247",
"0.63079476",
"0.62102634",
"0.61988276",
"0.6196506",
"0.61454993",
"0.61221856",
"0.61137414",
"0.61022323",
"0.6095521",
"0.6069956",
"0.6036684",
"0.6021152",
"0.5997814",
"0.5990277",
"0.5921229",
"0.5898715",
"0.589218",
"0.5876364",
"0.58750707"
] | 0.83148104 | 0 |
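A self-contained sketch of the same subset check against a pandas DataFrame; this standalone variant mirrors the positive document above but is an assumption-labeled rewrite, not the original method.

```python
import pandas as pd

def validate_col_lst(df, columns_lst):
    # Empty list is rejected, then a set difference finds unknown columns.
    if not columns_lst:
        raise ValueError("column_lst is empty")
    missing = set(columns_lst) - set(df.columns)
    if missing:
        raise ValueError(
            "col_lst has column names that do not exist in the DataFrame "
            "columns: {}".format(missing))
    return True

df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
print(validate_col_lst(df, ["a"]))   # True
# validate_col_lst(df, ["a", "z"])   # raises ValueError mentioning {'z'}
```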
given an OU, find all the OUs within that OU... | def get_child_ous(logger, org_client, org_unit):
logger.debug("Getting OUs for: %s", org_unit)
result = [org_unit]
# for this OU, get all the children...
args = dict(ParentId=org_unit["Id"])
children = utils.generic_paginator(logger, org_client.list_organizational_units_for_parent,
"OrganizationalUnits", **args)
# update child paths and then call ourselves recursively to find all children
for child in children:
child["Path"] = "{}/{}".format(org_unit["Path"], child["Name"]).replace("//", "/")
result.extend(get_child_ous(logger, org_client, child))
return result | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_accounts_for_ou(logger, options, org_client, path):\n logger.debug(\"Getting accounts for OU: %s\", path)\n org_unit = get_ou_from_path(logger, org_client, path)\n ous = []\n if options.no_recursive:\n ous.append(org_unit)\n else:\n ous.extend(get_child_ous(logger, org_client, org_unit))\n\n result = []\n for org_unit in ous:\n args = {\"ParentId\":org_unit[\"Id\"]}\n accounts = utils.generic_paginator(logger, org_client.list_accounts_for_parent,\n \"Accounts\", **args)\n for acc in accounts:\n acc[\"Path\"] = org_unit[\"Path\"]\n if 'Status' in acc:\n if acc['Status'] != 'SUSPENDED':\n result.append(acc)\n else:\n logger.info(\"found suspended account %s, ignoring it.\" % acc)\n return result",
"def list_ou(self, _):\n cn_re = re_compile(\"{[^}]+}\")\n results = self.engine.query(self.engine.GPO_INFO_FILTER(), [\"cn\", \"displayName\"])\n gpos = {}\n for gpo in results:\n gpos[gpo[\"cn\"]] = gpo[\"displayName\"]\n\n results = self.engine.query(self.engine.OU_FILTER())\n for result in results:\n print(result[\"distinguishedName\"])\n if \"gPLink\" in result:\n guids = cn_re.findall(result[\"gPLink\"])\n if len(guids) > 0:\n print(\"[gPLink]\")\n print(\"* {}\".format(\"\\n* \".join([gpos[g] if g in gpos else g for g in guids])))",
"def _get_ou_ids(self, org):\n\n # get root id\n root_id = self._get_root_id(org)\n\n # get OUs under the Org root\n ou_list_at_root_level = self._list_ou_for_parent(org, root_id)\n\n _ou_name_to_id_map = {}\n _all_ou_ids = []\n\n for ou_at_root_level in ou_list_at_root_level:\n # build list of all the OU IDs under Org root\n _all_ou_ids.append(ou_at_root_level.get('Id'))\n # build a list of ou id\n _ou_name_to_id_map.update(\n {ou_at_root_level.get('Name'): ou_at_root_level.get('Id')}\n )\n\n self.logger.info(\"Print OU Name to OU ID Map\")\n self.logger.info(_ou_name_to_id_map)\n\n return _all_ou_ids, _ou_name_to_id_map",
"def list_all_organizations(ctx):\n pprint(ctx.obj.orgs.get().data)",
"async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]",
"def createOuInLDAP(ldir, ou):\n\n dn = 'ou=%s,%s' % (ou, ldir.ldap_base_creation)\n attrs = {'objectClass': ['top', 'organizationalUnit'],\n 'ou': ou}\n ldir.insertLDAP(dn, attrs)",
"def list_orgs(self):\n orgs = list(self.orgs.keys())\n orgs.sort()\n return orgs",
"def test_retrieve_l_organizations(self):\n pass",
"def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))",
"def getAllRooms(z, opts):\n params = {}\n dmerge(params, parse_param('@attrs=uid'))\n dmerge(params, parse_param('@types=resources'))\n #dmerge(params, parse_param('@limit=5'))\n response = z.request('SearchDirectoryRequest', params=params, opts=opts)\n names = [item['name'] for item in response['SearchDirectoryResponse']['calresource']]\n return names",
"def get_ou_from_path(logger, org_client, path):\n logger.debug(\"Getting OU from path: %s\", path)\n\n current_ou = org_client.list_roots()[\"Roots\"][0][\"Id\"]\n if path == \"/\":\n return {\"Id\":current_ou, \"Path\":path}\n\n for dir_name in path.split(\"/\")[1:]:\n logger.debug(\"Getting OU from path: %s, looking for: %s\", path, dir_name)\n found = False\n args = dict(ParentId=current_ou)\n children = utils.generic_paginator(logger, org_client.list_organizational_units_for_parent,\n \"OrganizationalUnits\", **args)\n\n for org_unit in children:\n if org_unit[\"Name\"] == dir_name:\n current_ou = org_unit[\"Id\"]\n found = True\n break\n\n if not found:\n raise ValueError(\"OU path not found\")\n\n return {\"Id\":current_ou, \"Path\":path}",
"def get_ldap_users(conn, searchfilter, attrs):\n\n base_dn = conn.server.info.other['DefaultNamingContext'][0]\n conn.search(search_base=base_dn, search_filter=searchfilter, attributes=attrs)\n return conn.entries",
"def organizations(self):\n self.elements('organizations')",
"def organizations_at_location(self, location):\n if location is None:\n queryset = self.filter(location=None)\n elif location.region is None:\n queryset = self.filter(Q(location=None) | Q(location=location))\n elif location.tik is None:\n queryset = self.filter(Q(location=None) | Q(location__id__in=[location.region_id, location.id]))\n else:\n queryset = self.filter(Q(location=None) | Q(location__id__in=[location.tik_id, location.region_id, location.id]))\n\n organization_ids = set(queryset.values_list('organization_id', flat=True))\n\n organizations = Organization.objects.filter(id__in=organization_ids).order_by('title')\n\n for representative in OrganizationRepresentative.objects.filter(organization__in=organization_ids):\n organization = (filter(lambda org: org.id==representative.organization_id, organizations) or [None])[0]\n if organization:\n organization.representative = True\n\n return organizations",
"def get_orgs():\n \n url = \"https://api.github.com/user/orgs\"\n \n org_urls = []\n orgs = utils.get_json(url)\n \n for org in orgs:\n org_urls.append(org[\"url\"])\n \n return org_urls",
"def all_organizations(\n self,\n page: int | None = None,\n per_page: int | None = None,\n include_totals: bool = True,\n from_param: str | None = None,\n take: int | None = None,\n ):\n\n params = {\n \"page\": page,\n \"per_page\": per_page,\n \"include_totals\": str(include_totals).lower(),\n \"from\": from_param,\n \"take\": take,\n }\n\n return self.client.get(self._url(), params=params)",
"def test_getorgs(self):\n pass",
"def getUsersByOrganisation(SID, organisation_id, start, max, orderby, asc):\n return call(\"getUsersByOrganisation\", SID, organisation_id, start, max, orderby, asc)",
"def atlas_organizations():\n pass",
"def get_owner_entities(self, username):\n\t\t#print('Quasar Utility Server getting owner entities for username{' + username + '}')\n\t\treturn self._send_command_to_entity_server(us.SERVER_COMMAND_GET_OWNER_ENTITIES, username)",
"def test_organizations_list(self):\n pass",
"def organizations_owned(self):\n return sorted(set([team.org for team in self.teams if team.org.owners == team]),\n key=lambda o: o.title)",
"def orca_list():\n val = []\n val.append('orca')\n val.append('orca-b3lyp')\n return val",
"def offices_ldap():\n conn = Connection(\"ldap.laas.fr\", auto_bind=True)\n conn.search(\n \"dc=laas,dc=fr\",\n \"(laas-mainGroup=gepetto)\",\n attributes=[\"sn\", \"givenName\", \"roomNumber\", \"st\"],\n )\n offices = Offices()\n for entry in conn.entries:\n room, gn, sn, st = (\n str(entry.roomNumber),\n str(entry.givenName),\n str(entry.sn),\n str(entry.st),\n )\n if (\n st not in [\"JAMAIS\", \"NON-PERTINENT\"]\n and date(*(int(i) for i in reversed(st.split(\"/\")))) < date.today()\n ):\n continue # filter out alumni\n if room == \"[]\":\n continue # filter out the Sans-Bureaux-Fixes\n offices[room].add(Gepettist(sn, gn))\n return offices",
"def test_get_all_for_organization(self):\n org = Organization.create(name='foo', program_id=self.program.uid)\n org.put()\n user = User.create(name='foo', email='[email protected]',\n owned_organizations=[org.uid])\n user.put()\n response = self.testapp.get(\n '/api/organizations/{}/users'.format(org.uid),\n headers=self.login_headers(user),\n )\n response_list = json.loads(response.body)\n self.assertEqual(len(response_list), 1)",
"def create_ou(self, dn, name, description):\n attrs = {'objectclass': ['top', 'organizationalUnit'], 'ou': name, 'description': description}\n self.add_entry(dn, attrs)",
"def computeMailboxOu(portal, title):\n\n catalog = portal.portal_catalog\n\n current = cleaned = toAscii(title).lower()\n i = 1\n existing = True\n while existing:\n existing = catalog(ou=current)\n if not existing:\n break\n current = '%s_%d' % (cleaned, i)\n i += 1\n dtool = getToolByName(portal, 'portal_directories', None)\n if dtool is not None: # not in unit tests\n ldir = getattr(dtool, 'local_addressbook_ldap', None)\n if ldir is not None:\n createOuInLDAP(ldir, current)\n return current",
"def myorgs(request):\n context = RequestContext(request)\n \n user = request.user\n orgs = user.orgusers.get_query_set()\n \n context['orgs'] = orgs\n return render_to_response('myorgs.html', context)",
"def list_by_owner(owner_name):\n # TODO: move to search\n owner = account.find(owner_name)\n return Resource.query.join(Resource.owner).filter(Account.name==owner.name)",
"def list_all():\n\n members = ldapi.search(ld, cfg['ldap_users_base'], '(objectClass=member)')\n return dict([(member[0], member[1]) for member in members])"
] | [
"0.6812674",
"0.67796934",
"0.61813617",
"0.5930881",
"0.58512056",
"0.54681545",
"0.54233587",
"0.53596294",
"0.52998245",
"0.52364296",
"0.51616734",
"0.51128006",
"0.50904953",
"0.50900835",
"0.5064042",
"0.50519156",
"0.50236064",
"0.5009377",
"0.4984651",
"0.49781162",
"0.490988",
"0.48999056",
"0.48667663",
"0.4866166",
"0.48606572",
"0.48567817",
"0.48500574",
"0.48356378",
"0.4827705",
"0.47997653"
] | 0.6912658 | 0 |
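A minimal sketch of the same recursive OU walk using boto3's built-in paginator in place of the utils.generic_paginator helper; it assumes AWS credentials with organizations:List* permissions are already configured.

```python
import boto3

def walk_ous(client, parent_id, path="/"):
    # Depth-first walk over every OU under parent_id, yielding id + path.
    paginator = client.get_paginator("list_organizational_units_for_parent")
    for page in paginator.paginate(ParentId=parent_id):
        for ou in page["OrganizationalUnits"]:
            child_path = "{}/{}".format(path, ou["Name"]).replace("//", "/")
            yield {"Id": ou["Id"], "Path": child_path}
            for child in walk_ous(client, ou["Id"], child_path):
                yield child

# org = boto3.client("organizations")
# root_id = org.list_roots()["Roots"][0]["Id"]
# for ou in walk_ous(org, root_id):
#     print(ou["Path"], ou["Id"])
```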
given a path, traverse Organizations OUs to locate the required OU... | def get_ou_from_path(logger, org_client, path):
logger.debug("Getting OU from path: %s", path)
current_ou = org_client.list_roots()["Roots"][0]["Id"]
if path == "/":
return {"Id":current_ou, "Path":path}
for dir_name in path.split("/")[1:]:
logger.debug("Getting OU from path: %s, looking for: %s", path, dir_name)
found = False
args = dict(ParentId=current_ou)
children = utils.generic_paginator(logger, org_client.list_organizational_units_for_parent,
"OrganizationalUnits", **args)
for org_unit in children:
if org_unit["Name"] == dir_name:
current_ou = org_unit["Id"]
found = True
break
if not found:
raise ValueError("OU path not found")
return {"Id":current_ou, "Path":path} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_accounts_for_ou(logger, options, org_client, path):\n logger.debug(\"Getting accounts for OU: %s\", path)\n org_unit = get_ou_from_path(logger, org_client, path)\n ous = []\n if options.no_recursive:\n ous.append(org_unit)\n else:\n ous.extend(get_child_ous(logger, org_client, org_unit))\n\n result = []\n for org_unit in ous:\n args = {\"ParentId\":org_unit[\"Id\"]}\n accounts = utils.generic_paginator(logger, org_client.list_accounts_for_parent,\n \"Accounts\", **args)\n for acc in accounts:\n acc[\"Path\"] = org_unit[\"Path\"]\n if 'Status' in acc:\n if acc['Status'] != 'SUSPENDED':\n result.append(acc)\n else:\n logger.info(\"found suspended account %s, ignoring it.\" % acc)\n return result",
"def get_child_ous(logger, org_client, org_unit):\n logger.debug(\"Getting OUs for: %s\", org_unit)\n result = [org_unit]\n\n # for this OU, get all the children...\n args = dict(ParentId=org_unit[\"Id\"])\n children = utils.generic_paginator(logger, org_client.list_organizational_units_for_parent,\n \"OrganizationalUnits\", **args)\n\n # update child paths and then call ourselves recursively to find all children\n for child in children:\n child[\"Path\"] = \"{}/{}\".format(org_unit[\"Path\"], child[\"Name\"]).replace(\"//\", \"/\")\n result.extend(get_child_ous(logger, org_client, child))\n\n return result",
"def test_retrieve_l_organizations(self):\n pass",
"def users_organizations(user):\n if not user or not user.is_authenticated():\n return None\n else:\n return get_users_organizations(user)",
"def parse_common_organization_path(path: str) -> Dict[str, str]:\n m = re.match(r\"^organizations/(?P<organization>.+?)$\", path)\n return m.groupdict() if m else {}",
"def parse_common_organization_path(path: str) -> Dict[str, str]:\n m = re.match(r\"^organizations/(?P<organization>.+?)$\", path)\n return m.groupdict() if m else {}",
"def organizations(self):\n self.elements('organizations')",
"def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))",
"def test_retrieve_l_organization(self):\n pass",
"def save_organizations(self, user, path=None):\n # Redis has an end_cursor if we've collected this data before\n end_cursor = self.redis.get(''.join(['gh:', user.login, ':organizations:endCursor']))\n if end_cursor:\n end_cursor = end_cursor.decode('utf-8')\n end_cursor = ''.join(['\"', end_cursor, '\"'])\n organizations = u.organizations(first=100, after=end_cursor)\n else:\n organizations = u.organizations(first=100)\n if not organizations:\n return False\n while True:\n if organizations['data']['user']['organizations']['edges']:\n index = ''.join(['gh_organizations-', self.timestamp])\n self._write_to_datastore(index=index,\n doc_type='GithubOrganizations',\n document=organizations,\n login=user.login,\n path=path)\n has_next_page = organizations['data']['user']['organizations']['pageInfo']['hasNextPage']\n end_cursor = organizations['data']['user']['organizations']['pageInfo']['endCursor']\n if has_next_page:\n organizations = u.organizations(first=100, after=end_cursor)\n else:\n # Cache the end_cursor where we last collected data\n self.redis.set(''.join(['gh:', u.login, ':organizations:endCursor']), end_cursor)\n break\n else:\n break\n\n return True",
"def list_ou(self, _):\n cn_re = re_compile(\"{[^}]+}\")\n results = self.engine.query(self.engine.GPO_INFO_FILTER(), [\"cn\", \"displayName\"])\n gpos = {}\n for gpo in results:\n gpos[gpo[\"cn\"]] = gpo[\"displayName\"]\n\n results = self.engine.query(self.engine.OU_FILTER())\n for result in results:\n print(result[\"distinguishedName\"])\n if \"gPLink\" in result:\n guids = cn_re.findall(result[\"gPLink\"])\n if len(guids) > 0:\n print(\"[gPLink]\")\n print(\"* {}\".format(\"\\n* \".join([gpos[g] if g in gpos else g for g in guids])))",
"def test_organizations_read(self):\n pass",
"async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]",
"def test_getorgs(self):\n pass",
"def traverse(name, furtherPath):",
"def resolve_path(self, path):\n if path:\n if path[0] == '/':\n #zope objects case\n try: return self.unrestrictedTraverse(path)\n except: pass\n else:\n #aliss (python) objects case\n try: return self.get_aliss_object(path)\n except: pass\n #case of no path\n pass",
"def computeMailboxOu(portal, title):\n\n catalog = portal.portal_catalog\n\n current = cleaned = toAscii(title).lower()\n i = 1\n existing = True\n while existing:\n existing = catalog(ou=current)\n if not existing:\n break\n current = '%s_%d' % (cleaned, i)\n i += 1\n dtool = getToolByName(portal, 'portal_directories', None)\n if dtool is not None: # not in unit tests\n ldir = getattr(dtool, 'local_addressbook_ldap', None)\n if ldir is not None:\n createOuInLDAP(ldir, current)\n return current",
"def test_getorganizations_item(self):\n pass",
"def calc_path_2_ORCIDs(path=curr,node1=None,node2=None):\n\n with open(path + '/' + 'ORCID_graph.pkl', 'rb') as f:\n G = pickle.load(f)\n\n if (node1 is None) or (node2 is None):\n with open(path + '/' + 'centrality.csv', 'rb') as f:\n centrality = csv.reader(f, delimiter='\\t')\n rn = 0\n for row in centrality:\n if rn == 0:\n tmp1 = row\n rn += 1\n elif rn == 1:\n tmp2 = row\n rn += 1\n else:\n break\n if node1 is None:\n node1 = tmp1[0]\n if node2 is None:\n node2 = tmp2[0]\n\n try:\n short_path = nx.algorithms.shortest_paths.generic.shortest_path(G, source=node1,target=node2)\n except:\n return []\n\n return short_path",
"def test_retrieve_l_organization_locations(self):\n pass",
"def atlas_organizations():\n pass",
"def sub_test_verify_organizations_paths(self):\n self.TR.has_active_organizations() # resets the active orgs\n original_active_count = len(self.TR.active_organizations)\n last_org = self.TR.active_organizations[0]\n last_org_upload_paths = last_org.org_machine_upload_paths()\n random_index = random.randrange(0, len(last_org_upload_paths))\n remove_file_or_dir(last_org_upload_paths[random_index])\n self.TR.verify_organizations_paths()\n self.assertNotEqual(original_active_count, len(self.TR.active_organizations))",
"def test_organizations_list(self):\n pass",
"def traverse(self, path):\n\n path_list = [s for s in path.split('/') if len(s) > 0 ]\n # print(path)\n # print('files:', self.files)\n directory = self.files\n index = 0\n while index < len(path_list) and path_list[index] in directory:\n if type(directory[path_list[index]]) is str: # directory is a file\n break\n directory = directory[path_list[index]]\n index += 1\n print('info', directory, path_list[index:])\n return directory, path_list[index:]",
"def get_iso_path(src_iso_path, iso, dvd_path_list):\n for path in dvd_path_list: \n for root, _, all_files in os.walk(path.strip()):\n for onefile in all_files: \n if iso.upper() == onefile.upper(): \n src_iso_path = os.path.join(root, onefile) \n initlog(\"find iso: %s\" % src_iso_path)\n break\n return src_iso_path",
"def test_get_organization(self):\n pass",
"def _get_ou_ids(self, org):\n\n # get root id\n root_id = self._get_root_id(org)\n\n # get OUs under the Org root\n ou_list_at_root_level = self._list_ou_for_parent(org, root_id)\n\n _ou_name_to_id_map = {}\n _all_ou_ids = []\n\n for ou_at_root_level in ou_list_at_root_level:\n # build list of all the OU IDs under Org root\n _all_ou_ids.append(ou_at_root_level.get('Id'))\n # build a list of ou id\n _ou_name_to_id_map.update(\n {ou_at_root_level.get('Name'): ou_at_root_level.get('Id')}\n )\n\n self.logger.info(\"Print OU Name to OU ID Map\")\n self.logger.info(_ou_name_to_id_map)\n\n return _all_ou_ids, _ou_name_to_id_map",
"def _get_path_objs(self, path_list):\n objs = []\n for path in path_list:\n obj = self.unrestrictedTraverse(path, None)\n if obj and getattr(obj, 'isPrincipiaFolderish', 0):\n objs.append(obj)\n \n return objs",
"def createOuInLDAP(ldir, ou):\n\n dn = 'ou=%s,%s' % (ou, ldir.ldap_base_creation)\n attrs = {'objectClass': ['top', 'organizationalUnit'],\n 'ou': ou}\n ldir.insertLDAP(dn, attrs)",
"def extract_organization(self, root):\n organization = {}\n info = root.xpath('.//li/h4/a')\n if info:\n link = info[0].get('href', None)\n name = info[0].get('title', None)\n if link and name:\n stmt = select([\n func.count(self.organization_table.c.path)\n ]).where(\n self.organization_table.c.path == link\n )\n results = self.connection.execute(stmt).fetchall()\n if results[0][0] > 0:\n self.logger.debug('{} already exists'.format(name))\n return None\n self.logger.debug('Querying {1}: {0}'.format(link, name))\n response = self.session.get(self.PODEROPEDIA_BASE_URL + link)\n content = response.content\n html_tree = etree.HTML(content, parser=self.parser)\n connections = html_tree.xpath('//div[@id=\"conexiones\"]')\n if connections:\n organization_data = self.extract_element_data(connections[0])\n organization['organization_data'] = organization_data if organization_data else {}\n organization['organization_data']['path'] = link\n\n person = self.extract_persons(connections[0])\n organization['member'] = person if person else []\n for item in organization['member']:\n item.update({'source_path': link})\n\n related_organization = self.extract_participation(connections[0])\n organization['organization'] = related_organization if related_organization else []\n for item in organization['organization']:\n item.update({'source_path': link})\n return organization"
] | [
"0.5943409",
"0.5662944",
"0.5541912",
"0.5307396",
"0.52833915",
"0.52833915",
"0.51416296",
"0.51335",
"0.5058733",
"0.5023612",
"0.50106674",
"0.4983386",
"0.49637613",
"0.49609458",
"0.49326625",
"0.49166146",
"0.49147454",
"0.49008152",
"0.4848601",
"0.48095214",
"0.47985002",
"0.47953928",
"0.47874194",
"0.47285697",
"0.47135007",
"0.46970645",
"0.46951032",
"0.46913072",
"0.46748507",
"0.46727067"
] | 0.7749507 | 0 |
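A hedged standalone sketch of the path resolution above, again leaning on boto3 pagination; it raises ValueError when a segment of the path has no matching child OU, matching the document's behaviour. The "/prod/web" path in the usage comment is a made-up example.

```python
import boto3

def resolve_ou_path(client, path):
    # Start at the Org root and descend one path segment at a time.
    current = client.list_roots()["Roots"][0]["Id"]
    if path == "/":
        return {"Id": current, "Path": path}
    paginator = client.get_paginator("list_organizational_units_for_parent")
    for name in path.strip("/").split("/"):
        for page in paginator.paginate(ParentId=current):
            matches = [ou for ou in page["OrganizationalUnits"]
                       if ou["Name"] == name]
            if matches:
                current = matches[0]["Id"]
                break
        else:
            raise ValueError("OU path not found: {}".format(path))
    return {"Id": current, "Path": path}

# org = boto3.client("organizations")
# print(resolve_ou_path(org, "/prod/web"))
```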
given a path, get all the AWS accounts within that part of an Organization... | def get_accounts_for_ou(logger, options, org_client, path):
logger.debug("Getting accounts for OU: %s", path)
org_unit = get_ou_from_path(logger, org_client, path)
ous = []
if options.no_recursive:
ous.append(org_unit)
else:
ous.extend(get_child_ous(logger, org_client, org_unit))
result = []
for org_unit in ous:
args = {"ParentId":org_unit["Id"]}
accounts = utils.generic_paginator(logger, org_client.list_accounts_for_parent,
"Accounts", **args)
for acc in accounts:
acc["Path"] = org_unit["Path"]
if 'Status' in acc:
if acc['Status'] != 'SUSPENDED':
result.append(acc)
else:
logger.info("found suspended account %s, ignoring it." % acc)
return result | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def organizations(self):\n return self.get('{}/orgs'.format(ApiVersion.A1.value))",
"def ls():\n return dynamodb.ls(OrganizationModel)",
"async def get_organizations(request: Request):\n redis = request.app.state.redis\n organizations_obj = orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n return [org for org in organizations_obj]",
"def get_account():\n\n bus = session_bus()\n\n goa_manager = bus.get_object(GOA_NAME, GOA_PATH)\n\n goa_objects = goa_manager.GetManagedObjects(dbus_interface=OBJECT_MANAGER)\n\n accounts = [\n obj for obj in goa_objects\n if obj != GOA_MANAGER_PATH\n ]\n\n if len(accounts) > 1:\n sys.exit(\"More than one account found.\")\n\n (account_path,) = accounts\n\n return bus.get_object(GOA_NAME, account_path)",
"def get_buckets_for_user(self):\n s3 = self.credentials.session.resource('s3')\n bucket_list = [bucket.name for bucket in s3.buckets.all()]\n\n return bucket_list;",
"def organizations(self):\n self.elements('organizations')",
"def test_get_namespaces_from_account(self):\n pass",
"def list_all_organizations(ctx):\n pprint(ctx.obj.orgs.get().data)",
"def test_get_namespaces_from_accounts(self):\n pass",
"def _get_arns(self):\n client = self._get_client()\n\n account_arns = set()\n\n for role in list_roles(**self.conn_details):\n account_arns.add(role['Arn'])\n\n for user in list_users(**self.conn_details):\n account_arns.add(user['Arn'])\n\n for page in client.get_paginator('list_policies').paginate(Scope='Local'):\n for policy in page['Policies']:\n account_arns.add(policy['Arn'])\n\n for page in client.get_paginator('list_groups').paginate():\n for group in page['Groups']:\n account_arns.add(group['Arn'])\n\n result_arns = set()\n for arn in self.arn_list:\n if arn.lower() == 'all':\n return account_arns\n\n if arn not in account_arns:\n self.current_app.logger.warn(\"Provided ARN {arn} not found in account.\".format(arn=arn))\n continue\n\n result_arns.add(arn)\n\n self.current_app.logger.debug(\"got %d arns\", len(result_arns))\n return list(result_arns)",
"def fetch_account_catalogs(account:str):\n for config in accounts:\n if account in config['streamers']:\n return config['catalogs']\n return",
"def get_accounts(self):\n uri = '/credentials'\n response = gate_request(uri=uri)\n assert response.ok, 'Failed to get accounts: {0}'.format(response.text)\n\n all_accounts = response.json()\n self.log.debug('Accounts in Spinnaker:\\n%s', all_accounts)\n\n filtered_accounts = []\n for account in all_accounts:\n if account['type'] == self.provider:\n filtered_accounts.append(account)\n\n if not filtered_accounts:\n raise ForemastError('No Accounts matching {0}.'.format(self.provider))\n\n return filtered_accounts",
"def _recurse(self) -> Iterator[str]:\n\n client: s3.Client = boto3.client('s3')\n\n decoded_url = urlparse(self.url)\n bucket_name = decoded_url.netloc\n\n paginator = client.get_paginator('list_objects_v2')\n\n page_iterator: PageIterator = paginator.paginate(\n Bucket=bucket_name,\n Prefix=decoded_url.path.lstrip('/'),\n )\n\n for page in page_iterator:\n records = page.get('Contents', [])\n\n for record in records:\n key = record['Key']\n yield f's3://{bucket_name}/{key}'",
"def fetch_accounts(self):\n return self.fetch('/accounts')",
"def get_organization_links(self):\n yield from self.get_resource_by_item(\"/orgs\")",
"def accountId():\n # save the lookup if we set the account to the environment\n if \"AWS_ACCOUNT_ID\" in os.environ:\n return os.environ[\"AWS_ACCOUNT_ID\"]\n conn = iamConn()\n funcs = [\n lambda: conn.get_user().get('get_user_response')\\\n .get('get_user_result').get('user').get('arn'),\n lambda: conn.list_roles(max_items=1).get('list_roles_response')\\\n .get('list_roles_result').get('roles')[0].get('arn'),\n ]\n for func in funcs:\n try:\n arn = func()\n break\n except (boto.exception.BotoServerError, IndexError):\n pass\n return arn.split(':')[4]",
"def parse_common_organization_path(path: str) -> Dict[str, str]:\n m = re.match(r\"^organizations/(?P<organization>.+?)$\", path)\n return m.groupdict() if m else {}",
"def parse_common_organization_path(path: str) -> Dict[str, str]:\n m = re.match(r\"^organizations/(?P<organization>.+?)$\", path)\n return m.groupdict() if m else {}",
"def get_accounts(self, session: \"Session\") -> List[Account]:\n\n self.__get_dn(session)\n\n result = session.soapclient.get_accounts_by_owner(self.dn)\n return [Account(session, account=r) for r in result]",
"def accounts(web3):\n return web3.eth.accounts",
"def fetch_owner_accounts():\n resp = oauth.tapkey.get('Owners')\n owner_accounts = resp.json()\n return owner_accounts",
"def get_organization_links_by_page(self):\n return self.get_resource_by_page(\"/orgs\")",
"def find_by_account(cls,account):\n for credentials in cls.credential_list:\n if credentials.account == account:\n return credentials",
"def get_orgs():\n \n url = \"https://api.github.com/user/orgs\"\n \n org_urls = []\n orgs = utils.get_json(url)\n \n for org in orgs:\n org_urls.append(org[\"url\"])\n \n return org_urls",
"def test_get_all_for_organization(self):\n org = Organization.create(name='foo', program_id=self.program.uid)\n org.put()\n user = User.create(name='foo', email='[email protected]',\n owned_organizations=[org.uid])\n user.put()\n response = self.testapp.get(\n '/api/organizations/{}/users'.format(org.uid),\n headers=self.login_headers(user),\n )\n response_list = json.loads(response.body)\n self.assertEqual(len(response_list), 1)",
"def amazon_accounts():\n import json\n from security_monkey.datastore import Account, AccountType\n from os.path import dirname, join\n\n data_file = join(dirname(dirname(__file__)), \"data\", \"aws_accounts.json\")\n data = json.load(open(data_file, 'r'))\n\n app.logger.info('Adding / updating Amazon owned accounts')\n try:\n account_type_result = AccountType.query.filter(AccountType.name == 'AWS').first()\n if not account_type_result:\n account_type_result = AccountType(name='AWS')\n db.session.add(account_type_result)\n db.session.commit()\n db.session.refresh(account_type_result)\n\n for group, info in data.items():\n for aws_account in info['accounts']:\n acct_name = \"{group} ({region})\".format(group=group, region=aws_account['region'])\n account = Account.query.filter(Account.identifier == aws_account['account_id']).first()\n if not account:\n app.logger.debug(' Adding account {0}'.format(acct_name))\n account = Account()\n else:\n app.logger.debug(' Updating account {0}'.format(acct_name))\n\n account.identifier = aws_account['account_id']\n account.account_type_id = account_type_result.id\n account.active = False\n account.third_party = True\n account.name = acct_name\n account.notes = info['url']\n\n db.session.add(account)\n\n db.session.commit()\n app.logger.info('Finished adding Amazon owned accounts')\n except Exception as e:\n app.logger.exception(\"An error occured while adding accounts\")\n store_exception(\"manager-amazon-accounts\", None, e)",
"def get_creds(bucket_name, folder_name):\n client = boto3.client('sts')\n # pprint(client.get_caller_identity())\n\n # A name of our choice (no spaces allowed)\n credential_name = f'AccessTimeline-{folder_name}'\n policy = {\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n # A name of our choice [0-9a-zA-Z]\n \"Sid\": f'ListBucket{folder_name}',\n \"Action\": [\n \"s3:ListBucket\",\n ],\n \"Effect\": \"Allow\",\n \"Resource\": [\n f'arn:aws:s3:::{bucket_name}',\n ],\n \"Condition\": {\n \"StringEquals\": {\n \"s3:prefix\": [\n f'{folder_name}/'\n ],\n \"s3:delimiter\": [\n '/'\n ],\n }\n }\n\n },\n {\n \"Sid\": f'GetObject{folder_name}',\n \"Action\": [\n \"s3:GetObject\",\n ],\n \"Effect\": \"Allow\",\n \"Resource\": [\n f'arn:aws:s3:::{bucket_name}/{folder_name}/*',\n ],\n\n # FIXME: it should be possible to place restrictions using Condition,\n # but I haven't been able to get this to work.\n #\n # \"Condition\": {\n # \"StringLike\": {\n # \"s3:prefix\": [\n # f'{folder_name}/*'\n # ],\n # \"s3:delimiter\": [\n # '/'\n # ],\n # }\n # }\n },\n ]\n }\n\n # pprint(policy)\n policy = json.dumps(policy)\n credential_lifetime = 900 # 900 seconds is the minimum value\n\n creds = client.get_federation_token(\n Name=credential_name,\n Policy=policy,\n DurationSeconds=credential_lifetime,\n )\n\n # pprint(creds)\n return creds['Credentials']",
"def list_buckets():\n for bucket in s3.buckets.all():\n print(bucket)",
"def listOrganizations(self, name='', type=''):\n return self.get_json('/organization', {'name': name, 'type': type})",
"def Accounts(self):\n\n if not self.connected:\n return []\n\n try:\n accounts_listing = _ReadNoProxy(\n GOOGLE_GCE_METADATA_ACCOUNTS_URI + '/')\n accounts_lines = accounts_listing.split()\n accounts = []\n for account_line in accounts_lines:\n account = account_line.strip('/')\n if account == 'default':\n continue\n accounts.append(account)\n return accounts\n except urllib2.HTTPError as e:\n raise MetadataServerException(e)\n except urllib2.URLError as e:\n raise CannotConnectToMetadataServerException(e)"
] | [
"0.5892514",
"0.5680063",
"0.5646794",
"0.5550604",
"0.5464335",
"0.5430682",
"0.5372216",
"0.5369034",
"0.53668135",
"0.53542835",
"0.5335564",
"0.5326624",
"0.53066283",
"0.5276022",
"0.5273535",
"0.52497715",
"0.5233608",
"0.5233608",
"0.5214551",
"0.5214118",
"0.52107763",
"0.5200044",
"0.51917666",
"0.5191017",
"0.51906574",
"0.518239",
"0.51757336",
"0.5162156",
"0.5157345",
"0.50720674"
] | 0.63345224 | 0 |
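A short sketch of the leaf step only — listing the non-suspended accounts directly under one parent — using boto3's paginator; the recursion over child OUs and the no_recursive switch from the document above are deliberately left out, and the OU id in the usage comment is a placeholder.

```python
import boto3

def accounts_under(client, parent_id, path):
    # Yields active (non-suspended) accounts attached to one OU or root.
    paginator = client.get_paginator("list_accounts_for_parent")
    for page in paginator.paginate(ParentId=parent_id):
        for account in page["Accounts"]:
            if account.get("Status") == "SUSPENDED":
                continue  # skip suspended accounts, as the document does
            account["Path"] = path
            yield account

# org = boto3.client("organizations")
# for acct in accounts_under(org, "ou-example-12345678", "/prod"):
#     print(acct["Path"], acct["Id"], acct["Name"])
```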
Checks globals() and builtins for the existence of the object name (used for StuWareSoftSystems' bootstrap) | def checkObjectInNameSpace(objectName):
if objectName is None or not isinstance(objectName, basestring) or objectName == u"": return False
if objectName in globals(): return True
return objectName in dir(builtins) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def isbuiltin(object):\n if inspect.isbuiltin(object):\n return True\n\n return getattr(object, '__module__', None) == 'builtins'",
"def is_builtin_name(name):\r\n if name.startswith('__') and name.endswith('__'):\r\n return ALL_LOWER_CASE_RE.match(name[2:-2]) is not None\r\n return False",
"def global_exists(self, global_name):\n return self.evaluate('!(typeof %s === \"undefined\");' %\n global_name)",
"def testFindsBuiltins(self):\r\n self.assertEqual('sys', modulefinder.get_module_filename('sys'))\r\n self.assertEqual('time', modulefinder.get_module_filename('time'))",
"def has_global(node, name):\n return hasattr(node, \"globals\") and name in node.globals",
"def is_builtins(self) -> bool:\n return self.source.startswith(self.builtins_import_string)",
"def isbuiltin(object):\r\n return isinstance(object, types.BuiltinFunctionType)",
"def _is_in_stdlib(module, some_object):\n # Clear PYTHONPATH temporarily and try importing the given module.\n original_sys_path = sys.path\n lib_path = os.path.dirname(traceback.__file__)\n sys.path = [lib_path]\n\n # On Mac, some extra library paths are required.\n if 'darwin' in platform.system().lower():\n for path in original_sys_path:\n if 'site-packages' not in path:\n sys.path.append(path)\n\n in_stdlib = False\n\n try:\n module = importlib.import_module(module)\n\n if some_object:\n getattr(module, some_object)\n\n in_stdlib = True\n except (ImportError, AttributeError):\n pass\n\n sys.path = original_sys_path\n\n return in_stdlib",
"def test_core_object_types_global():\n for core_object_type in CORE_OBJECT_TYPES:\n core_object = get_object_from_string(core_object_type)\n assert core_object.__name__.lower() == core_object_type",
"def test_global():\n global PATH, OS, collections, deque\n from os import path as PATH\n import os as OS\n import collections\n from collections import deque\n # make sure that these triggers unused-variable\n from sys import platform\n from sys import version as VERSION\n import this\n import re as RE",
"def global_check(self):\n return None",
"def is_ipython():\n return 'get_ipython' in globals()",
"def _validate_builtin(_):\n pass",
"def in_global_code(self):\n return self.sscope is None and self.lscope is None",
"def __contains__(name):",
"def is_builtin(fn) -> bool:\n return getattr(fn, TRITON_BUILTIN, False)",
"def register(self, name, obj):\r\n self.eval_allowed_globals[name] = obj",
"def _is_exported_name(name):\n # If ``check`` ever switches to using the ``__all__`` mechanism, update this code:\n return not name.startswith(\"_\")",
"def ignore_builtin_verification():\n return not current_space().skip_builtin_verification",
"def ioc(globals):\n\tfrom Module.Shapes.ShapeFactory import shape_factory\n\tglobals['shape_factory'] = shape_factory\n\tfrom Module.Lighting.Colors import Colors\n\tglobals['Colors'] = Colors",
"def is_top_level_function(obj: Any) -> bool:\r\n return callable(obj) and obj.__name__ in sys.modules[obj.__module__].__dict__",
"def is_builtin_type(tp):\n return hasattr(__builtins__, tp.__name__) and tp is getattr(__builtins__, tp.__name__)",
"def is_mobu():\n\n return 'pyfbsdk' in main.__dict__",
"def get_builtin(name):\n t = getattr(builtins, name)\n if isinstance(t, type):\n return t\n raise ValueError(name)",
"def check_for_underscore(self):\n # If something injected a '_' variable in __builtin__, delete\n # ipython's automatic one so we don't clobber that. gettext() in\n # particular uses _, so we need to stay away from it.\n if '_' in __builtin__.__dict__:\n try:\n del self.shell.user_ns['_']\n except KeyError:\n pass",
"def test_if_ipython():\n try:\n return __IPYTHON__\n except NameError:\n return False",
"def dispatch_commands(_globals, _name_):\n try:\n argh.dispatch_commands([\n v for k, v in _globals.items()\n if isinstance(v, types.FunctionType)\n and v.__module__ == _name_\n and not k.startswith('_')\n and k != 'main'\n ])\n except KeyboardInterrupt:\n sys.exit(1)",
"def register(obj_name, obj):\n if obj_name not in ninja_globals['register']:\n ninja_globals['register'][obj_name] = obj",
"def test_swift_globals(self):\n self.build()\n self.do_test()",
"def test_molecool_imported():\n assert \"molecool\" in sys.modules"
] | [
"0.6845686",
"0.64684844",
"0.64186686",
"0.63405824",
"0.6296543",
"0.6213715",
"0.60902375",
"0.60629964",
"0.60133064",
"0.5856874",
"0.5762603",
"0.57043445",
"0.56223327",
"0.56172764",
"0.55805594",
"0.5576068",
"0.5568513",
"0.5556084",
"0.55383843",
"0.5527319",
"0.5527259",
"0.55162597",
"0.5487715",
"0.54780805",
"0.5468831",
"0.5442787",
"0.5433667",
"0.5423802",
"0.541526",
"0.54051715"
] | 0.7423229 | 0 |
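For reference, the same globals-plus-builtins existence check is a short stand-alone function in plain Python 3 (outside the Jython/Moneydance runtime); this is a minimal sketch, assuming str in place of basestring and the standard builtins module:

import builtins

def name_exists(object_name):
    # Mirror of checkObjectInNameSpace(): reject non-strings and empty names,
    # then look in the module globals before falling back to builtins.
    if not isinstance(object_name, str) or object_name == "":
        return False
    if object_name in globals():
        return True
    return object_name in dir(builtins)

print(name_exists("print"))         # True - a builtin
print(name_exists("no_such_name"))  # False - bound nowhere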
Pass a string in the format 'x.x.x'. Checks that this macOS version is at least that version; the third (micro) number is optional | def isOSXVersionAtLeast(compareVersion):
# type: (basestring) -> bool
try:
if not Platform.isOSX(): return False
def convertVersion(convertString):
_os_major = _os_minor = _os_micro = 0
_versionNumbers = []
for versionPart in StringUtils.splitIntoList(convertString, '.'):
strippedPart = StringUtils.stripNonNumbers(versionPart, '.')
if (StringUtils.isInteger(strippedPart)):
_versionNumbers.append(Integer.valueOf(Integer.parseInt(strippedPart)))
else:
_versionNumbers.append(0)
if len(_versionNumbers) >= 1: _os_major = max(0, _versionNumbers[0])
if len(_versionNumbers) >= 2: _os_minor = max(0, _versionNumbers[1])
if len(_versionNumbers) >= 3: _os_micro = max(0, _versionNumbers[2])
return _os_major, _os_minor, _os_micro
os_major, os_minor, os_micro = convertVersion(System.getProperty("os.version", "0.0.0"))
myPrint("DB", "MacOS Version number(s): %s.%s.%s" %(os_major, os_minor, os_micro))
if not isinstance(compareVersion, basestring) or len(compareVersion) < 1:
myPrint("B", "ERROR: Invalid compareVersion of '%s' passed - returning False" %(compareVersion))
return False
chk_os_major, chk_os_minor, chk_os_micro = convertVersion(compareVersion)
myPrint("DB", "Comparing against Version(s): %s.%s.%s" %(chk_os_major, chk_os_minor, chk_os_micro))
if os_major < chk_os_major: return False
if os_major > chk_os_major: return True
if os_minor < chk_os_minor: return False
if os_minor > chk_os_minor: return True
if os_micro < chk_os_micro: return False
return True
except:
myPrint("B", "ERROR: isOSXVersionAtLeast() failed - returning False")
dump_sys_error_to_md_console_and_errorlog()
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_from_version(version: str) -> str:\n version_int = [int(v) for v in version.split(\".\")]\n if version_int[0] not in PipetteModelMajorVersion:\n raise ValueError(f\"Major version {version_int[0]} is not supported.\")\n if version_int[1] not in PipetteModelMinorVersion:\n raise ValueError(f\"Minor version {version_int[1]} is not supported.\")\n return version",
"def _is_python_version(s: str) -> bool:\n\n return s.startswith(\"2\") or s.startswith(\"3\")",
"def check_version_str(version):\n if not version.startswith('v') and version != 'current':\n version = 'v%s' % version\n return version",
"def is_stable_version(version):\n if not isinstance(version, tuple):\n version = version.split('.')\n last_part = version[-1]\n\n if not re.search('[a-zA-Z]', last_part):\n return True\n else:\n return False",
"def __check_nm_version(self):\n try:\n proxy = self.bus.get_object(\n self.system_service_name, \"/org/freedesktop/NetworkManager\")\n props = dbus.Interface(proxy, \"org.freedesktop.DBus.Properties\")\n version = props.Get(\"org.freedesktop.NetworkManager\", \"Version\")\n except dbus.exceptions.DBusException:\n version = \"0.8\"\n if re.match(r'^1\\.', version):\n self.nm_version = \"1.0\"\n return\n if re.match(r'^0\\.9', version):\n self.nm_version = \"0.9\"\n return\n if re.match(r'^0\\.8', version):\n self.nm_version = \"0.8\"\n return\n self.nm_version = Messages.unknown_version",
"def check_python_version(match, current=None):\n if current is None:\n current = list(sys.version_info[:3])\n if not isinstance(match, list):\n match = [match]\n for m in match:\n minimal = False\n if isinstance(m, float):\n m = str(m)\n if m.endswith(\"+\"):\n minimal = True\n m = m[:-1]\n # assert m[0].isdigit()\n # assert m[-1].isdigit()\n m = [int(x) for x in m.split(\".\")]\n current_len = current[: len(m)]\n # print(m, current, current_len)\n if minimal:\n if current_len >= m:\n return True\n else:\n if current_len == m:\n return True\n return False",
"def is_new_osx():\n name = distutils.util.get_platform()\n if sys.platform != \"darwin\":\n return False\n elif name.startswith(\"macosx-10\"):\n minor_version = int(name.split(\"-\")[1].split(\".\")[1])\n if minor_version >= 7:\n return True\n else:\n return False\n else:\n return False",
"def test_osx_version_number_value(self):\n \n running_version_number = get_osx_version()[0]\n \n # Check to make sure the returned valued is 10.11.1\n self.assertEqual(running_version_number, '10.11.1')",
"def version_check(version):\n return {\n 1: 'OF10', # 0x01 -> OF1.0\n 3: 'OF12', # 0x03 -> OF1.2\n 4: 'OF13', # 0x04 -> OF1.3\n 5: 'OF14', # 0x05 -> OF1.4\n 6: 'OF15', # 0x06 -> OF1.5\n }.get(version, 0)",
"def os_is_compatible(required_os_version: str) -> bool:\n\tcurrent_version = [int(c) for c in os_release().split('.')]\n\trequired_version = [int(c) for c in required_os_version.split('.')]\n\n\t# 10.13.6.2 is not (necessarily) compatible with 10.13.6\n\tif len(required_version) > len(current_version) and\\\n\t required_version[0:len(current_version)] == current_version:\n\t return False\n\n\t# Compare versions component-wise\n\tfor (c, r) in zip(current_version, required_version):\n\t\tif c < r:\n\t\t\treturn False\n\n\treturn True",
"def testStratisVersion(self):\n version = Manager.Properties.Version.Get(get_object(TOP_OBJECT))\n (major, _, _) = version.split(\".\")\n self.assertEqual(major, \"0\")",
"def check_pythonver(reqver_text):\n\treqver = map(int, reqver_text.split('.'))\n\tpythonver = sys.version_info[:3]\n\treturn check_ver(pythonver, reqver)",
"def test_version():\n versions = ((2, 7, 16), (3, 5, 7), (3, 6, 8), (3, 7, 3))\n assert sys.version_info[:3] in versions",
"def version_major_minor(version_string):\n return '.'.join(version_string.split('.')[0:2])",
"def check_version_is_supported(name, version, min_version, help=''):\n if (pkg_resources.parse_version(version) <\n pkg_resources.parse_version(min_version)):\n # Version is too old.\n print('ERROR: Unsupported %s version: %s (minimum %s).%s' %\n (name, version, min_version, (' %s' % help) if help else ''),\n file=sys.stderr)\n exit(1)",
"def check_if_version_supports_restricted(operator_version):\n try:\n the_version = operator_version.split(\"-\")[0]\n\n parts = the_version.split(\".\")\n if int(parts[0]) < 6:\n return False\n if int(parts[0]) >= 7:\n return True\n if int(parts[1]) > 2:\n return True\n if int(parts[1]) < 2:\n return False\n return int(parts[2]) >= 18\n # pylint: disable=W0703\n except Exception:\n logger.info(\"issues parsing version %s\", operator_version)\n return True",
"def verify_ios_versionNumber():\r\n msg = \"\"\r\n try:\r\n 'Getting Version number for IOS '\r\n if g.platform == 'ios':\r\n text_view = ui_controls.text_view(get_obj_identifier('about_versionNumber_lbl'), label=True)\r\n\r\n 'Verifying whether Version number is matching with expected value IOS'\r\n if g.platform == 'ios' and text_view.strip() == g.version_number :\r\n print \"Version number is verified successfully. Expected : %s. Actual : %s\" % (g.version_number,text_view.strip())\r\n else:\r\n if g.platform == 'ios':\r\n print \"Version number is not verified successfully. Expected : %s. Actual : %s\" % (g.version_number, text_view.strip())\r\n return False, msg\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return True, msg",
"def test__get_program_version():\n version = util._get_program_version(\"midgard\")\n assert isinstance(version, str) and re.search(\"[0-9]\", version)",
"def python_version_check():\n min_version_list = PYTHON_MIN_VERSION.split(\".\")\n # Truncate if the list is more the 4 items\n if len(min_version_list) > 4:\n min_version_list = min_version_list[:4]\n # Fill if the list is less then 4 items\n if len(min_version_list) == 1:\n min_version_list.append(\"0\")\n if len(min_version_list) == 2:\n min_version_list.append(\"0\")\n if len(min_version_list) == 3:\n min_version_list.append(\"f0\")\n # Calculate the minimum version and an integer, which, when displayed as\n # hex, is easily recognised as the version. E.g. 0x30502f0 is 3.5.2\n min_version_value = 0\n for index, item in enumerate(min_version_list[::-1]):\n min_version_value = min_version_value + int(item, 16) * 2**(index * 8)\n if debug: print(\"Python Version Minimum:{}, Decimal:{}, Hex:{}\"\n .format(PYTHON_MIN_VERSION, min_version_value,\n hex(min_version_value)))\n # test value and exit if below minimum revision\n if sys.hexversion < min_version_value:\n print(\"Python Version: {}. Required minimum version is: {}. Exiting...\"\n .format(sys.version.split(\" \")[0], PYTHON_MIN_VERSION))\n sys.exit()",
"def test_osx_version_number_type(self):\n \n running_version_number = get_osx_version()[0]\n \n # Check to make sure the returned valued is a string\n self.assertEqual(type(running_version_number), str)",
"def test_major(scraper, version_parts):\n\n new_version_parts = list(version_parts)\n new_version_parts[0] = int(new_version_parts[0]) + 1\n\n assert scraper.is_compatible_with(generate_version(new_version_parts)) is False",
"def check_py_version(self, cur_version):\n\n # convert cur_version to string, in case of erroneous type being passed\n cur_version = str(cur_version)\n\n acceptable_python_versions_regex = r\"(^(2\\.[6-9])(\\.?\\d{1,2})?$)|(^(3\\.[3-9])(\\.?\\d{1,2})?$)\"\n pyversions_regex_compiled = re.compile(acceptable_python_versions_regex)\n pyversions_match = pyversions_regex_compiled.match(cur_version)\n\n # If match is found, return True. If no match, return False\n if pyversions_match:\n return True\n else:\n return False",
"def check_version(ctx, _, value):\n if not value or ctx.resilient_parsing:\n return\n\n click.echo(f\"geocube v{importlib.metadata.version('geocube')}\")\n\n ctx.exit()",
"def is_version_2_6() -> bool:\n v = get_version()\n if v[1] != \"singularity\" and v[1] != \"singularity-ce\":\n return False\n return v[0][0] == 2 and v[0][1] == 6",
"def get_version():\n # this implementation avoids calling Foundation and will work on\n # non Apple OSes.\n vers = \"UNKNOWN\"\n build = \"\"\n # find the munkilib directory, and the version file\n munkilibdir = os.path.dirname(os.path.abspath(__file__))\n versionfile = os.path.join(munkilibdir, \"version.plist\")\n if os.path.exists(versionfile):\n try:\n vers_plist = readPlist(versionfile)\n except (IOError, OSError, ExpatError):\n pass\n else:\n try:\n vers = vers_plist['CFBundleShortVersionString']\n build = vers_plist['BuildNumber']\n except KeyError:\n pass\n if build:\n vers = vers + \".\" + build\n return vers",
"def test_versionString(self):\n self.assertIn(\"%d.%d.%d\" % nevow.__version_info__, nevow.__version__)",
"def test_major(self):\n self.assertEqual(\"0\", self._version1.major())\n self.assertEqual(\"1.2\", self._version2.major())",
"def _validate_os(module):\n rc, out, err = module.run_command(['cat', '/etc/os-release'])\n\n # Validate for a BSD string in output\n if 'BSD' not in out:\n msg_err = 'Error: Unsupported OS. This can only be used on BSD systems.'\n module.fail_json(msg=msg_err)",
"def is_version_3_1_or_newer() -> bool:\n if is_apptainer_1_or_newer():\n return True # this is equivalent to singularity-ce > 3.9.5\n v = get_version()\n return v[0][0] >= 4 or (v[0][0] == 3 and v[0][1] >= 1)",
"def test_valid_hh_version():\n # TODO: Basically only enforcing correct main segment, since not using `re.fullmatch`\n # TODO: Probably want `re.fullmatch` here - Currently ignoring any potentially invalid suffix\n version_pattern = r\"^[0-9]+\\.[0-9]+\\.[0-9]+(|a[0-9]|b[0-9]|rc[0-9])\"\n res = re.match(version_pattern, hh.__version__)\n assert res is not None"
] | [
"0.6574004",
"0.6253473",
"0.6247122",
"0.6238033",
"0.6165214",
"0.6156912",
"0.612077",
"0.60846204",
"0.60516804",
"0.6008613",
"0.5996625",
"0.5967177",
"0.59521145",
"0.59322596",
"0.5885311",
"0.5882469",
"0.58820486",
"0.5845207",
"0.5835244",
"0.5812287",
"0.580141",
"0.5753324",
"0.5741814",
"0.57388365",
"0.5727548",
"0.5719305",
"0.5712523",
"0.570974",
"0.5707202",
"0.56917536"
] | 0.62811625 | 1 |
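The cascade of major/minor/micro comparisons above collapses into a single tuple comparison in plain Python; this is a minimal sketch, assuming platform.mac_ver() in place of the JVM's os.version property:

import platform

def is_osx_version_at_least(compare_version):
    # Returns False off macOS, mirroring the Platform.isOSX() guard.
    if platform.system() != "Darwin":
        return False

    def to_tuple(version_string):
        # Strip non-digits from each part and pad to (major, minor, micro).
        parts = []
        for piece in version_string.split("."):
            digits = "".join(ch for ch in piece if ch.isdigit())
            parts.append(int(digits) if digits else 0)
        return tuple((parts + [0, 0, 0])[:3])

    current = to_tuple(platform.mac_ver()[0])   # e.g. '14.2.1' -> (14, 2, 1)
    return current >= to_tuple(compare_version)

# Tuple ordering performs the whole major/minor/micro cascade at once:
# (14, 2, 1) >= (14, 2, 0) is True; (14, 2, 1) >= (15, 0, 0) is False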
Detect an Intel x86 32-bit system | def isIntelX86_32bit():
return String(System.getProperty("os.arch", "null").strip()).toLowerCase(Locale.ROOT) == "x86" | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_32bit(self):\n return self.machine in ['i386', 'i586', 'i686']",
"def osarch_is_32_bit():\n return osarch_match(\"32-bit\")",
"def osarch_is_ia32():\n return osarch_match(\"ia32\")",
"def host_arch_win():\n\n arch = os.environ.get('PROCESSOR_ARCHITECTURE', 'x86')\n\n matchup = {\n 'AMD64' : 'x64',\n 'x86' : 'ia32',\n 'arm' : 'arm',\n }\n\n return matchup.get(arch, 'ia32')",
"def host_arch_win():\n\n arch = os.environ.get('PROCESSOR_ARCHITECTURE', 'x86')\n\n matchup = {\n 'AMD64': 'x64',\n 'x86': 'ia32',\n 'arm': 'arm',\n }\n\n return matchup.get(arch, 'ia32')",
"def is_64_windows():\n return 'PROGRAMFILES(X86)' in os.environ",
"def is_64_windows(self):\n return 'PROGRAMFILES(X86)' in os.environ",
"def get_os_name(x86=0):\r\n platform_in_short, on_win = sys.platform[:3], 0\r\n\r\n if platform_in_short == \"win\":\r\n on_win = 1\r\n os_name = \"nt\"\r\n elif platform_in_short == \"lin\":\r\n os_name = \"lin\"\r\n else:\r\n os_name = \"sol\"\r\n if not x86:\r\n os_name += \"64\"\r\n return on_win, os_name",
"def get_os_name(x86=0):\r\n platform_in_short, on_win = sys.platform[:3], 0\r\n\r\n if platform_in_short == \"win\":\r\n on_win = 1\r\n os_name = \"nt\"\r\n elif platform_in_short == \"lin\":\r\n os_name = \"lin\"\r\n else:\r\n os_name = \"sol\"\r\n if not x86:\r\n os_name += \"64\"\r\n return on_win, os_name",
"def host_arch_cc():\n\n k = cc_macros()\n\n matchup = {\n '__x86_64__' : 'x64',\n '__i386__' : 'ia32',\n '__arm__' : 'arm',\n }\n\n rtn = 'ia32' # default\n\n for i in matchup:\n if i in k and k[i] != '0':\n rtn = matchup[i]\n break\n\n return rtn",
"def bitness():\n # see https://docs.python.org/2/library/platform.html#platform.architecture\n return '64-bit' if sys.maxsize > 2**32 else '32-bit'",
"def osarch_is_amd64():\n return osarch_match(\"amd64\")",
"def host_arch_cc():\n\n k = cc_macros()\n\n matchup = {\n '__x86_64__': 'x64',\n '__i386__': 'ia32',\n '__arm__': 'arm',\n }\n\n rtn = 'ia32' # default\n\n for i in matchup:\n if i in k and k[i] != '0':\n rtn = matchup[i]\n break\n\n return rtn",
"def architecture(self):\n return self.random.choice([\n 'x86_64', \n 'x86'\n ])",
"def is_64bit(self):\n return self.machine == 'x86_64'",
"def test_cpu_architecture_value(self):\n \n cpu_architecture = get_cpu_information()[0]\n \n # Check to make sure the returned value is \"x86_64\"\n self.assertEqual(cpu_architecture, 'x86_64')",
"def osarch_is_64_bit():\n return osarch_match(\"64-bit\")",
"def usefulFunction():\n print(platform.uname()) # Yay it told me about my computer - no idea what it means but thats cool",
"def on_powerpc():\n return processor() == 'powerpc' or machine().startswith('ppc')",
"def is_vserver_kernel():\n\n kinfo = commands.getoutput('/bin/uname -a').split()[2]\n return '-vs' in kinfo",
"def architecture():\n if is_darwin:\n # Darwin's platform.architecture() is buggy and always\n # returns \"64bit\" event for the 32bit version of Python's\n # universal binary. So we roll out our own (that works\n # on Darwin).\n if sys.maxsize > 2 ** 32:\n return '64bit'\n else:\n return '32bit'\n else:\n return platform.architecture()[0]",
"def usefulFunction():\n print(platform.uname()) #displayed this computer's specifications",
"def setosvariablesx86():\n\tKPROCESS = ''\n\tAPLINKS = ''\n\tUPID = ''\n\tTOKEN = ''\n\tversion = sys.getwindowsversion()\n\n\tif((version.major == 5) and (version.minor == 1) and ('3' in version.service_pack)):\n\t\t# the target machine's OS is Windows XP SP3\n\t\tprint \"[*] OS version: Windows XP SP3\"\n\t\tKPROCESS = '\\x44'\n\t\tTOKEN\t= '\\xC8'\n\t\tUPID\t = '\\x84'\n\t\tAPLINKS = '\\x88'\n \n\telif((version.major == 5) and (version.minor == 2) and ('2' in version.service_pack)):\n\t\t# the target machine's OS is Windows Server 2003 SP2\n\t\tprint \"[*] OS version: Windows Server 2003 SP2\"\n\t\tKPROCESS = '\\x38'\n\t\tTOKEN\t= '\\xD8'\n\t\tUPID\t = '\\x94'\n\t\tAPLINKS = '\\x98'\n \n\telif((version.major == 6) and (version.minor == 0) and ('1' in version.service_pack or '2' in version.service_pack) and (version.product_type == VER_NT_WORKSTATION)):\n\t\t# the target machine's OS is Windows Vista SP1 / SP2\n\t\tprint \"[*] OS version: Windows Vista SP1 / SP2\"\n\t\tKPROCESS = '\\x48'\n\t\tTOKEN\t= '\\xE0'\n\t\tUPID\t = '\\x9C'\n\t\tAPLINKS = '\\xA0'\n \n\telif((version.major == 6) and (version.minor == 0) and ('1' in version.service_pack or '2' in version.service_pack) and (version.product_type != VER_NT_WORKSTATION)):\n\t\t# the target machine's OS is Windows Server 2008 / SP2\n\t\tprint \"[*] OS version: Windows Server 2008 / SP2\"\n\t\tKPROCESS = '\\x48'\n\t\tTOKEN\t= '\\xE0'\n\t\tUPID\t = '\\x9C'\n\t\tAPLINKS = '\\xA0'\n \n\telif((version.major == 6) and (version.minor == 1)):\n\t\t# the target machine's OS is Windows 7 / SP1\n\t\tprint \"[*] OS version: Windows 7 / SP1\"\n\t\tKPROCESS = '\\x50'\n\t\tTOKEN\t= '\\xF8'\n\t\tUPID\t = '\\xB4'\n\t\tAPLINKS = '\\xB8'\n\t\n\telse:\n\t\tprint \"[-] No matching OS version, exiting...\"\n\t\tsys.exit(-1)\n\t\n\treturn (KPROCESS,APLINKS,UPID,TOKEN)",
"def is64bit(self):\n return platform.machine().endswith('64')",
"def is_64bit():\n is64bit = sys.maxsize > 2 ** 32\n if sys.platform == \"cli\":\n is64bit = sys.executable.endswith(\"ipy64.exe\")\n return is64bit",
"def get_arch():\n arch = platform.machine()\n if arch == \"i686\":\n return \"i686\"\n elif arch == \"x86_64\":\n return \"x86_64\"\n elif arch == \"aarch64\":\n return \"aarch64\"\n else:\n return \"unknown\"",
"def get_platform_architecture() -> None:\n global _PLATFORM, _ARCHITECTURE, _COMPRESSION\n\n x86_64 = {\"x86_64\", \"amd64\", \"AMD64\", \"64bit\"}\n i386 = {\"i386\", \"i486\", \"i586\", \"i686\", \"386\", \"x86\", \"32bit\"}\n\n system = platform.system()\n if system == \"Windows\":\n machine = platform.machine()\n else:\n machine = os.uname().machine\n\n if system == \"Linux\":\n _PLATFORM = \"linux\"\n if machine in x86_64:\n _ARCHITECTURE = \"64\"\n elif machine in i386:\n _ARCHITECTURE = \"32\"\n else:\n _ARCHITECTURE = \"other\"\n\n elif system in {\"OpenBSD\", \"NetBSD\", \"FreeBSD\"}:\n _PLATFORM = \"bsd\"\n _ARCHITECTURE = \"other\"\n if system == \"FreeBSD\":\n if machine in x86_64:\n if detect_freebsd_linux_compatibility(\"64\"):\n _PLATFORM = \"linux\"\n _ARCHITECTURE = \"64\"\n elif machine in i386:\n if detect_freebsd_linux_compatibility(\"32\"):\n _PLATFORM = \"linux\"\n _ARCHITECTURE = \"32\"\n\n elif system in {\"Haiku\", \"Hurd\"}:\n _PLATFORM = \"linux\"\n _ARCHITECTURE = \"other\"\n\n elif system == \"Darwin\":\n _PLATFORM = \"mac\"\n _ARCHITECTURE = \"os\"\n elif system == \"Windows\":\n _PLATFORM = \"win\"\n if machine in x86_64:\n _ARCHITECTURE = \"64\"\n elif machine in i386:\n _ARCHITECTURE = \"32\"\n if not all([_PLATFORM, _ARCHITECTURE]):\n raise PlatformError(f\"Failed to detect appropriate platform. {system} {machine}\")\n\n if _PLATFORM == \"win\":\n _COMPRESSION = \"zip\"\n else:\n _COMPRESSION = \"tar.gz\"",
"def isOnNao():\n szCpuInfo = \"/proc/cpuinfo\";\n if not os.path.exists( szCpuInfo ): # already done by the getFileContents\n return False;\n szAllFile = getFileContents( szCpuInfo, bQuiet = True );\n if( szAllFile.find( \"Geode\" ) == -1 and szAllFile.find( \"Intel(R) Atom(TM)\" ) == -1 ):\n return False;\n return True;",
"def test_os_processor(self):\n self.assertEqual(self.settings.OS_PROCESSOR, platform.processor())",
"def processor():\n return uname().processor"
] | [
"0.81732696",
"0.80353045",
"0.7400126",
"0.6873196",
"0.6867948",
"0.6770266",
"0.6644437",
"0.6488959",
"0.6488959",
"0.6432496",
"0.6342804",
"0.63230413",
"0.6291553",
"0.62735385",
"0.6245272",
"0.615874",
"0.6148934",
"0.6148358",
"0.613531",
"0.5986601",
"0.5973497",
"0.59698814",
"0.59559375",
"0.59020966",
"0.58889675",
"0.58845216",
"0.58734906",
"0.58576053",
"0.58499914",
"0.5842323"
] | 0.8507626 | 0 |
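Outside the JVM there is no os.arch property; platform.machine() is the closest analogue, though its values vary by OS. A minimal sketch, with the i386-family names as an assumption about what a 32-bit Intel box reports:

import platform

def is_intel_x86_32bit():
    # JVM os.arch reports 'x86' for 32-bit Intel; platform.machine()
    # typically reports an i?86 name on such systems instead.
    machine = platform.machine().lower()
    return machine in ("x86", "i386", "i486", "i586", "i686")

print(platform.machine())        # e.g. 'x86_64', 'arm64', 'i686'
print(is_intel_x86_32bit())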
Grabs the MD defaultText font, reduces the default size down to below 18, and sets UIManager defaults (if running as a runtime extension this will probably error, so it's caught and skipped) | def setDefaultFonts():
if MD_REF_UI is None: return
# If a runtime extension, then this may fail, depending on timing... Just ignore and return...
try:
myFont = MD_REF.getUI().getFonts().defaultText
except:
myPrint("B","ERROR trying to call .getUI().getFonts().defaultText - skipping setDefaultFonts()")
return
if myFont is None:
myPrint("B","WARNING: In setDefaultFonts(): calling .getUI().getFonts().defaultText has returned None (but moneydance_ui was set) - skipping setDefaultFonts()")
return
if myFont.getSize()>18:
try:
myFont = myFont.deriveFont(16.0)
myPrint("B", "I have reduced the font size down to point-size 16 - Default Fonts are now set to: %s" %(myFont))
except:
myPrint("B","ERROR - failed to override font point size down to 16.... will ignore and continue. Font set to: %s" %(myFont))
else:
myPrint("DB", "Attempting to set default font to %s" %myFont)
try:
UIManager.getLookAndFeelDefaults().put("defaultFont", myFont )
# https://thebadprogrammer.com/swing-uimanager-keys/
UIManager.put("CheckBoxMenuItem.acceleratorFont", myFont)
UIManager.put("Button.font", myFont)
UIManager.put("ToggleButton.font", myFont)
UIManager.put("RadioButton.font", myFont)
UIManager.put("CheckBox.font", myFont)
UIManager.put("ColorChooser.font", myFont)
UIManager.put("ComboBox.font", myFont)
UIManager.put("Label.font", myFont)
UIManager.put("List.font", myFont)
UIManager.put("MenuBar.font", myFont)
UIManager.put("Menu.acceleratorFont", myFont)
UIManager.put("RadioButtonMenuItem.acceleratorFont", myFont)
UIManager.put("MenuItem.acceleratorFont", myFont)
UIManager.put("MenuItem.font", myFont)
UIManager.put("RadioButtonMenuItem.font", myFont)
UIManager.put("CheckBoxMenuItem.font", myFont)
UIManager.put("OptionPane.buttonFont", myFont)
UIManager.put("OptionPane.messageFont", myFont)
UIManager.put("Menu.font", myFont)
UIManager.put("PopupMenu.font", myFont)
UIManager.put("OptionPane.font", myFont)
UIManager.put("Panel.font", myFont)
UIManager.put("ProgressBar.font", myFont)
UIManager.put("ScrollPane.font", myFont)
UIManager.put("Viewport.font", myFont)
UIManager.put("TabbedPane.font", myFont)
UIManager.put("Slider.font", myFont)
UIManager.put("Table.font", myFont)
UIManager.put("TableHeader.font", myFont)
UIManager.put("TextField.font", myFont)
UIManager.put("Spinner.font", myFont)
UIManager.put("PasswordField.font", myFont)
UIManager.put("TextArea.font", myFont)
UIManager.put("TextPane.font", myFont)
UIManager.put("EditorPane.font", myFont)
UIManager.put("TabbedPane.smallFont", myFont)
UIManager.put("TitledBorder.font", myFont)
UIManager.put("ToolBar.font", myFont)
UIManager.put("ToolTip.font", myFont)
UIManager.put("Tree.font", myFont)
UIManager.put("FormattedTextField.font", myFont)
UIManager.put("IconButton.font", myFont)
UIManager.put("InternalFrame.optionDialogTitleFont", myFont)
UIManager.put("InternalFrame.paletteTitleFont", myFont)
UIManager.put("InternalFrame.titleFont", myFont)
except:
myPrint("B","Failed to set Swing default fonts to use Moneydance defaults... sorry")
myPrint("DB",".setDefaultFonts() successfully executed...")
return | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_default_font(cls):\n if platform.system() == \"Linux\":\n for family in (\"DejaVu Sans\", \"Noto Sans\", \"Nimbus Sans\"):\n if family in tk.font.families():\n logger.debug(\"Setting default font to: '%s'\", family)\n tk.font.nametofont(\"TkDefaultFont\").configure(family=family)\n tk.font.nametofont(\"TkHeadingFont\").configure(family=family)\n tk.font.nametofont(\"TkMenuFont\").configure(family=family)\n break\n return tk.font.nametofont(\"TkDefaultFont\").configure()[\"family\"]",
"def defaultFont(self, p_int=None): # real signature unknown; restored from __doc__ with multiple overloads\r\n pass",
"def GetDefaultFont(self):\n return wx.Font(10, wx.MODERN, wx.NORMAL, wx.NORMAL)",
"def loadDefaultFont(size):\n\n try:\n f = pygame.font.Font(None,size)\n except error, message:\n print \"Cannot load the default font\"\n raise SystemExit, message\n return f",
"def set_font_size(*args):\n size = font_size.get()\n message_inp.configure(font=f'TKDefault {size}')",
"def setHardwareFont():\n dislin.hwfont()",
"def setTTFont(font='default'):\n if font == 'default':\n font = 'Times New Roman' \n dislin.winfnt(font)",
"def test_configs_font(\n self):\n root = Tk()\n custom = font.Font(root, family='Helvetica', size=12)\n self.assertEqual(custom.cget('family'), 'Helvetica')\n fontSelect.font_style(custom, 'Times')\n self.assertEqual(custom.cget('family'), 'Times')\n fontSelect.font_size(custom, 18)\n self.assertEqual(custom.cget('size'), 18)",
"def set_font_family_default(self):\n font = QFont('Arial', 12)\n self.parent.setCurrentFont(font)",
"def __init__(self, font='mediumbold'):\n\tself.set_font(font)",
"def base_font(self) -> str:\n pass",
"def setDislinFont(font='default'):\n fontdict[font]()",
"def get_default_font(bold=False):\n font = _QFont()\n font.setPointSize(FONT_SIZE)\n font.setBold(bold)\n return font",
"def setFont(font='default',hardware=1):\n if font == 'default' and hardware:\n setHardwareFont()\n return\n currfmt = getFileFormat()\n if isPostscript(currfmt):\n setPSFont(font)\n elif isWMF(currfmt):\n setTTFont(font)\n else:\n setDislinFont(font)",
"def get_text_font ( self, object ):\n return self.text_font",
"def setup(theme='DarkAmber'):\r\n sg.theme(theme)\r\n\r\n headline_font = ('Arial bold', 20)\r\n font = ('Arial', 20)\r\n warning_font = ('Arial bold', 14)\r\n button_font = ('Arial', 14)\r\n\r\n return headline_font, font, warning_font, button_font",
"def get_font_options(self): # real signature unknown; restored from __doc__\n pass",
"def resetDefaults(self):\n self.client.SetFont(wx.Font(10,wx.SWISS,wx.NORMAL,wx.NORMAL))\n self.client.SetFontSizeAxis(10)\n self.client.SetFontSizeLegend(7)\n self.client.setLogScale((False,False))\n self.client.SetXSpec('auto')\n self.client.SetYSpec('auto')",
"def fontDialog(*args, FontList: bool=True, scalable: bool=True, **kwargs)->AnyStr:\n pass",
"def initDefaults(self):\n return _libsbml.TextGlyph_initDefaults(self)",
"def set_font(self, font):\n\tself.m_font = font",
"def load_font(fontSize):\n f1='/usr/share/fonts/corefonts/arialbd.ttf' \n f2='/usr/share/fonts/truetype/msttcorefonts/Arial_Bold.ttf'\n if os.path.isfile(f1): font=ImageFont.truetype(f1,fontSize)\n if os.path.isfile(f2): font=ImageFont.truetype(f2,fontSize)\n return font",
"def adjusting_fonts(self):\n fix_x = int(0 * settings.scale)\n fix_y = int(0 * settings.scale)\n font_object = self.fontA\n box = self.box\n text_box = self.box.get_size()\n text_list = self.text.split()\n number_of_words = len(text_list)\n count = 0\n height = fix_y\n first = True\n line = \"\"\n line_break = False\n while count < number_of_words:\n line += text_list[count]\n line_size = font_object.size(line)\n line_pos = int((text_box[0] + fix_x - line_size[0]) / 2)\n if line_size[0] < text_box[0]:\n if count + 1 < number_of_words:\n temporary_line = line + \" \" + text_list[count + 1]\n if font_object.size(temporary_line)[0] >= text_box[0]:\n line_image = font_object.render(line, 1, self.color)\n height += int((line_size[1] * 0.8))\n box.blit(line_image, (line_pos, height))\n line = \"\"\n else:\n line += \" \"\n elif count + 1 == number_of_words:\n height += int((line_size[1] * 0.8))\n box.blit(\n font_object.render(line, 1, self.color), (line_pos, height)\n )\n else:\n line = text_list[count]\n height += int(\n line_size[1] * 0.8\n ) # If line height is perfect it does not seem that it is the same text\n count += 1",
"def get_font_at_size(fonts_path, font_name, initial_font_size, text_to_print, target_width):\n font_size = initial_font_size\n while True:\n font = ImageFont.truetype(path.join(fonts_path, font_name), font_size)\n text_width = font.getsize(text_to_print)[0]\n if text_width <= target_width:\n break\n if font_size < 9:\n break\n font_size = font_size - 1\n return font",
"def set_font(s: Optional[int] = 14, reset: Optional[bool] = False) -> None:\n if reset:\n plt.rcParams.update(plt.rcParamsDefault)\n plt.rcParams[\"figure.figsize\"] = [20, 10]\n # plt.rcParams['font.family'] = 'serif'\n # plt.rcParams['font.serif'] = ['Times New Roman'] + plt.rcParams['font.serif']\n plt.rc('font', size=s) # controls default text sizes\n plt.rc('axes', titlesize=s) # fontsize of the axes title\n plt.rc('axes', labelsize=s) # fontsize of the x and y labels\n plt.rc('xtick', labelsize=s - 2) # fontsize of the tick labels\n plt.rc('ytick', labelsize=s - 2) # fontsize of the tick labels\n plt.rc('legend', fontsize=s) # legend fontsize\n plt.rc('figure', titlesize=s + 2) # fontsize of the figure title",
"def setCommonFonts(windows=None):\n f = setFont('fontNormal', family=cfgFontName, size=cfgFontSize)\n aliasFont('fontButton', 'fontNormal')\n fb = setFont('fontBold', family=cfgFontName, size=cfgFontSize, weight='bold')\n fi = setFont('fontItalic', family=cfgFontName, size=cfgFontSize, slant='italic')\n setFont('fontLabel', family=cfgFontName, size=cfgFontSize+1, weight='bold')\n if windows:\n windows.fontBig = tkFont.Font(size=cfgFontSize+2, family=cfgFontName, weight='bold')\n windows.font = f\n windows.fontBold = fb\n windows.fontItalic = fi",
"def font(self):\n\treturn self.m_font",
"def font(self):\n\treturn self.m_font",
"def update_editor ( self ):\n super( TextFontEditor, self ).update_editor()\n set_font( self )",
"def set_default(self):\n self.online()\n self.justify('L')\n self.inverse_off()\n self.double_height_off()\n self.set_line_height(30)\n self.bold_off()\n self.underline_off()\n self.set_barcode_height(50)\n self.set_size('s')\n self.set_charset(0)\n self.set_code_page(0)"
] | [
"0.68280387",
"0.6737032",
"0.6695929",
"0.6610507",
"0.64827216",
"0.6389932",
"0.6196747",
"0.6126894",
"0.60420203",
"0.60152656",
"0.59716773",
"0.596298",
"0.593417",
"0.5910025",
"0.5883346",
"0.58664095",
"0.58539915",
"0.5838941",
"0.58219564",
"0.58039767",
"0.578717",
"0.575832",
"0.57524735",
"0.5694457",
"0.5687482",
"0.56561804",
"0.5655132",
"0.5655132",
"0.564786",
"0.56450135"
] | 0.829493 | 0 |
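The core pattern in setDefaultFonts() - derive a capped font, then register it under many UIManager keys - works stand-alone in any Swing script; this is a minimal Jython sketch with just two illustrative keys (the full routine sets several dozen):

from java.awt import Font
from javax.swing import UIManager

def cap_and_apply_font(base_font, max_points=18, capped_points=16.0):
    # deriveFont(float) returns a copy of the font at the new point size.
    font = base_font
    if font.getSize() > max_points:
        font = font.deriveFont(capped_points)
    for key in ["Label.font", "Button.font"]:
        UIManager.put(key, font)
    return font

# Usage with a hypothetical oversized base font:
# applied = cap_and_apply_font(Font("Dialog", Font.PLAIN, 20))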
Sets up Client Properties for JFileChooser() to behave as required (Mac only) | def setJFileChooserParameters(_jf, lReportOnly=False, lDefaults=False, lPackagesT=None, lApplicationsT=None, lOptionsButton=None, lNewFolderButton=None):
myPrint("D", "In ", inspect.currentframe().f_code.co_name, "()")
if not Platform.isOSX(): return
if not isinstance(_jf, JFileChooser): return
_PKG = "JFileChooser.packageIsTraversable"
_APP = "JFileChooser.appBundleIsTraversable"
_OPTIONS = "JFileChooser.optionsPanelEnabled"
_NEWFOLDER = "JFileChooser.canCreateDirectories"
# JFileChooser defaults: https://violetlib.org/vaqua/filechooser.html
# "JFileChooser.packageIsTraversable" default False >> set "true" to allow Packages to be traversed
# "JFileChooser.appBundleIsTraversable" default False >> set "true" to allow App Bundles to be traversed
# "JFileChooser.optionsPanelEnabled" default False >> set "true" to allow Options button
# "JFileChooser.canCreateDirectories" default False >> set "true" to allow New Folder button
if debug or lReportOnly:
myPrint("B", "Parameters set: ReportOnly: %s, Defaults:%s, PackagesT: %s, ApplicationsT:%s, OptionButton:%s, NewFolderButton: %s" %(lReportOnly, lDefaults, lPackagesT, lApplicationsT, lOptionsButton, lNewFolderButton))
txt = ("Before setting" if not lReportOnly else "Reporting only")
for setting in [_PKG, _APP, _OPTIONS, _NEWFOLDER]: myPrint("DB", "%s: '%s': '%s'" %(pad(txt,14), pad(setting,50), _jf.getClientProperty(setting)))
if lReportOnly: return
if lDefaults:
_jf.putClientProperty(_PKG, None)
_jf.putClientProperty(_APP, None)
_jf.putClientProperty(_OPTIONS, None)
_jf.putClientProperty(_NEWFOLDER, None)
else:
if lPackagesT is not None: _jf.putClientProperty(_PKG, lPackagesT)
if lApplicationsT is not None: _jf.putClientProperty(_APP, lApplicationsT)
if lOptionsButton is not None: _jf.putClientProperty(_OPTIONS, lOptionsButton)
if lNewFolderButton is not None: _jf.putClientProperty(_NEWFOLDER, lNewFolderButton)
for setting in [_PKG, _APP, _OPTIONS, _NEWFOLDER]: myPrint("DB", "%s: '%s': '%s'" %(pad("After setting",14), pad(setting,50), _jf.getClientProperty(setting)))
return | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setFileDialogParameters(lReportOnly=False, lDefaults=False, lSelectDirectories=None, lPackagesT=None):\n\n myPrint(\"D\", \"In \", inspect.currentframe().f_code.co_name, \"()\")\n\n if not Platform.isOSX(): return\n\n _TRUE = \"true\"\n _FALSE = \"false\"\n\n _DIRS_FD = \"apple.awt.fileDialogForDirectories\" # When True you can select a Folder (rather than a file)\n _PKGS_FD = \"apple.awt.use-file-dialog-packages\" # When True allows you to select a 'bundle' as a file; False means navigate inside the bundle\n # \"com.apple.macos.use-file-dialog-packages\" # DEPRECATED since Monterrey - discovered this about MD2022.5(4090) - refer: java.desktop/sun/lwawt/macosx/CFileDialog.java\n\n # FileDialog defaults\n # \"apple.awt.fileDialogForDirectories\" default \"false\" >> set \"true\" to allow Directories to be selected\n # \"apple.awt.use-file-dialog-packages\" default \"true\" >> set \"false\" to allow access to Mac 'packages'\n\n if debug or lReportOnly:\n myPrint(\"B\", \"Parameters set: ReportOnly: %s, Defaults:%s, SelectDirectories:%s, PackagesT:%s\" % (lReportOnly, lDefaults, lSelectDirectories, lPackagesT))\n txt = (\"Before setting\" if not lReportOnly else \"Reporting only\")\n for setting in [_DIRS_FD, _PKGS_FD]: myPrint(\"DB\", \"%s: '%s': '%s'\" %(pad(txt,14), pad(setting,50), System.getProperty(setting)))\n if lReportOnly: return\n\n if lDefaults:\n System.setProperty(_DIRS_FD,_FALSE)\n System.setProperty(_PKGS_FD,_TRUE)\n else:\n if lSelectDirectories is not None: System.setProperty(_DIRS_FD, (_TRUE if lSelectDirectories else _FALSE))\n if lPackagesT is not None: System.setProperty(_PKGS_FD, (_TRUE if lPackagesT else _FALSE))\n\n for setting in [_DIRS_FD, _PKGS_FD]: myPrint(\"DB\", \"After setting: '%s': '%s'\" %(pad(setting,50), System.getProperty(setting)))\n\n return",
"def readInConfigFileDlg( self ):\n pass",
"def askOpen(parent,title='',defaultDir='',defaultFile='',wildcard='',style=wx.OPEN):\r\n defaultDir,defaultFile = [GPath(x).s for x in (defaultDir,defaultFile)]\r\n dialog = wx.FileDialog(parent,title,defaultDir,defaultFile,wildcard, style )\r\n if dialog.ShowModal() != wx.ID_OK: \r\n result = False\r\n elif style & wx.MULTIPLE:\r\n result = map(GPath,dialog.GetPaths())\r\n else:\r\n result = GPath(dialog.GetPath())\r\n dialog.Destroy()\r\n return result",
"def input_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._input_path_var.set(filename)",
"def pkg_app_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._pkg_app_path_var.set(filename)",
"def onLoad (self):\n #productive #onButton\n profprint()\n self.fileDialog = qt.QFileDialog(self.parent)\n self.fileDialog.setDirectory(slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\",\"Config\"))\n self.fileDialog.options = self.fileDialog.DontUseNativeDialog\n self.fileDialog.acceptMode = self.fileDialog.AcceptOpen\n self.fileDialog.defaultSuffix = \"cfg\"\n self.fileDialog.setNameFilter(\"Configuration File (*.cfg)\")\n self.fileDialog.connect(\"fileSelected(QString)\", self.onLoadFileSelected)\n self.fileDialog.show()",
"def __init__(self, parent):\n super(CookiesConfigurationDialog, self).__init__(parent)\n self.setupUi(self)\n \n self.__mw = parent\n \n jar = self.__mw.cookieJar()\n acceptPolicy = jar.acceptPolicy()\n if acceptPolicy == CookieJar.AcceptAlways:\n self.acceptCombo.setCurrentIndex(0)\n elif acceptPolicy == CookieJar.AcceptNever:\n self.acceptCombo.setCurrentIndex(1)\n elif acceptPolicy == CookieJar.AcceptOnlyFromSitesNavigatedTo:\n self.acceptCombo.setCurrentIndex(2)\n \n keepPolicy = jar.keepPolicy()\n if keepPolicy == CookieJar.KeepUntilExpire:\n self.keepUntilCombo.setCurrentIndex(0)\n elif keepPolicy == CookieJar.KeepUntilExit:\n self.keepUntilCombo.setCurrentIndex(1)\n \n self.filterTrackingCookiesCheckbox.setChecked(\n jar.filterTrackingCookies())\n \n msh = self.minimumSizeHint()\n self.resize(max(self.width(), msh.width()), msh.height())",
"def __init__(self, parent=None):\n super(E5ComboPathPicker, self).__init__(parent, useLineEdit=False)",
"def onLoad (self):\r\n # productive #onButton\r\n profprint()\r\n self.dirDialog = qt.QFileDialog(self.parent)\r\n self.dirDialog.setDirectory(slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\", \"Config\"))\r\n self.dirDialog.options = self.dirDialog.DontUseNativeDialog\r\n self.dirDialog.acceptMode = self.dirDialog.AcceptOpen\r\n self.dirDialog.defaultSuffix = \"cfg\"\r\n self.dirDialog.setNameFilter(\"Configuration File (*.cfg)\")\r\n self.dirDialog.connect(\"fileSelected(QString)\", self.onLoadFileSelected)\r\n self.dirDialog.show()",
"def __showPathPickerDialog(self):\n if self.__mode == E5PathPickerModes.NoMode:\n return\n \n if self.__mode == E5PathPickerModes.CustomMode:\n self.pickerButtonClicked.emit()\n return\n \n self.aboutToShowPathPickerDialog.emit()\n \n windowTitle = self.__windowTitle\n if not windowTitle:\n if self.__mode == E5PathPickerModes.OpenFileMode:\n windowTitle = self.tr(\"Choose a file to open\")\n elif self.__mode == E5PathPickerModes.OpenFilesMode:\n windowTitle = self.tr(\"Choose files to open\")\n elif self.__mode in [\n E5PathPickerModes.SaveFileMode,\n E5PathPickerModes.SaveFileEnsureExtensionMode,\n E5PathPickerModes.SaveFileOverwriteMode]:\n windowTitle = self.tr(\"Choose a file to save\")\n elif self.__mode == E5PathPickerModes.DirectoryMode:\n windowTitle = self.tr(\"Choose a directory\")\n \n directory = self._editorText()\n if not directory and self.__defaultDirectory:\n directory = self.__defaultDirectory\n if self.__mode == E5PathPickerModes.OpenFilesMode:\n directory = os.path.expanduser(directory.split(\";\")[0])\n else:\n directory = os.path.expanduser(directory)\n if not os.path.isabs(directory) and self.__defaultDirectory:\n directory = os.path.join(self.__defaultDirectory, directory)\n directory = Utilities.fromNativeSeparators(directory)\n \n if self.__mode == E5PathPickerModes.OpenFileMode:\n path = E5FileDialog.getOpenFileName(\n self,\n windowTitle,\n directory,\n self.__filters)\n path = Utilities.toNativeSeparators(path)\n elif self.__mode == E5PathPickerModes.OpenFilesMode:\n paths = E5FileDialog.getOpenFileNames(\n self,\n windowTitle,\n directory,\n self.__filters)\n path = \";\".join([Utilities.toNativeSeparators(path)\n for path in paths])\n elif self.__mode == E5PathPickerModes.SaveFileMode:\n path = E5FileDialog.getSaveFileName(\n self,\n windowTitle,\n directory,\n self.__filters,\n E5FileDialog.Options(E5FileDialog.DontConfirmOverwrite))\n path = Utilities.toNativeSeparators(path)\n elif self.__mode == E5PathPickerModes.SaveFileEnsureExtensionMode:\n path, selectedFilter = E5FileDialog.getSaveFileNameAndFilter(\n self,\n windowTitle,\n directory,\n self.__filters,\n None,\n E5FileDialog.Options(E5FileDialog.DontConfirmOverwrite))\n path = Utilities.toNativeSeparators(path)\n if path:\n ext = QFileInfo(path).suffix()\n if not ext:\n ex = selectedFilter.split(\"(*\")[1].split(\")\")[0]\n if ex:\n path += ex\n elif self.__mode == E5PathPickerModes.SaveFileOverwriteMode:\n path = E5FileDialog.getSaveFileName(\n self,\n windowTitle,\n directory,\n self.__filters)\n path = Utilities.toNativeSeparators(path)\n elif self.__mode == E5PathPickerModes.DirectoryMode:\n path = E5FileDialog.getExistingDirectory(\n self,\n windowTitle,\n directory,\n E5FileDialog.Options(E5FileDialog.ShowDirsOnly))\n path = Utilities.toNativeSeparators(path)\n while path.endswith(os.sep):\n path = path[:-1]\n elif self.__mode == E5PathPickerModes.DirectoryShowFilesMode:\n path = E5FileDialog.getExistingDirectory(\n self,\n windowTitle,\n directory,\n E5FileDialog.Options(E5FileDialog.DontUseNativeDialog))\n path = Utilities.toNativeSeparators(path)\n while path.endswith(os.sep):\n path = path[:-1]\n \n if path:\n self._setEditorText(path)\n self.pathSelected.emit(path)",
"def saveInConfigFileDlg( self ):\n pass",
"def __init__(self, parent, state, position = wx.DefaultPosition):\n ##Set up data.\n self.state = state\n modeName = MODE_LIST[self.state.GetSurface(\"Mode\")]\n wx.Dialog.__init__(self, parent, -1, \"%s Mode Settings\" %(modeName),\n pos = position,\n style = wx.DEFAULT_FRAME_STYLE ^ (wx.RESIZE_BORDER | \n wx.MINIMIZE_BOX |\n wx.MAXIMIZE_BOX)\n | wx.TAB_TRAVERSAL)\n ##Jconf pull-down menu.\n \n self.lblStBox1 = wx.StaticBox(self, -1, \"Programs to launch\" )\n ##Name Server checkbox.\n self.cbNameServer = wx.CheckBox(self, -1, \"Name Server\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.cbNameServer.SetToolTip(wx.ToolTip(\"Run Name Server at Launch\"))\n ##Conductor checkbox.\n self.cbConductor = wx.CheckBox(self, -1, \"Conductor\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.cbConductor.SetToolTip(wx.ToolTip(\"Run Conductor at Launch\"))\n ##Xplorer checkbox.\n self.cbXplorer = wx.CheckBox(self, -1, \"Xplorer\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.cbXplorer.SetToolTip(wx.ToolTip(\"Run Xplorer at Launch\"))\n ##Desktop checkbox.\n self.cbDesktop = wx.CheckBox(self, -1, \"Desktop Mode\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.cbDesktop.SetToolTip(wx.ToolTip(\"Set Desktop Mode for\" +\n \" Conductor and Xplorer\"))\n \n self.lblStBox2 = wx.StaticBox(self, -1, \"Xplorer Configuration\" )\n ##Xplorer Type radio box.\n self.rbXplorer = wx.RadioBox(self, -1, \"Mode\",\n wx.DefaultPosition, wx.DefaultSize,\n RADIO_XPLORER_LIST, 1, wx.RA_SPECIFY_ROWS)\n self.rbXplorer.SetToolTip(wx.ToolTip(\"Which Xplorer format do you\" +\n \" want to launch?\"))\n ##Cluster button.\n self.bCluster = wx.Button(self, -1, \"Cluster Settings\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.bCluster.SetToolTip(wx.ToolTip(\"Set the computers and extra\" +\n \" variables in the cluster.\"))\n ##Configuration Choice\n self.chJconf = wx.Choice(self, -1, wx.DefaultPosition, [150,-1])\n self.chJconf.SetToolTip(wx.ToolTip(\"Choose Xplorer's configuration.\"))\n ##Edit Jconf button.\n self.bEditJconf = wx.Button(self, -1, \"Edit Configuration List\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.bEditJconf.SetToolTip(wx.ToolTip(\"Edit the list of Xplorer\" +\n \" configurations.\")) \n #OK and Cancel button\n if windows:\n self.bOk = wx.Button( self, wx.ID_OK, \"OK\", wx.DefaultPosition, wx.DefaultSize, 0 )\n else:\n self.bOk = wx.Button( self, wx.ID_SAVE, \"Save\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.bCancel = wx.Button( self, wx.ID_CANCEL, \"Cancel\", wx.DefaultPosition, wx.DefaultSize, 0 )\n \n ##Bind events.\n self.Bind(wx.EVT_LISTBOX, self.Refresh, self.chJconf)\n self.Bind(wx.EVT_CHECKBOX, self.Refresh, self.cbXplorer)\n self.Bind(wx.EVT_RADIOBOX, self.Refresh, self.rbXplorer)\n self.Bind(wx.EVT_CHECKBOX, self.Refresh, self.cbConductor)\n self.Bind(wx.EVT_CHECKBOX, self.Refresh, self.cbDesktop)\n \"\"\"\n self.Bind(wx.EVT_LISTBOX, self.UpdateData, self.chJconf)\n self.Bind(wx.EVT_CHECKBOX, self.UpdateData, self.cbXplorer)\n self.Bind(wx.EVT_RADIOBOX, self.UpdateData, self.rbXplorer)\n self.Bind(wx.EVT_CHECKBOX, self.UpdateData, self.cbConductor)\n self.Bind(wx.EVT_CHECKBOX, self.UpdateData, self.cbDesktop)\n \"\"\"\n self.Bind(wx.EVT_CLOSE, self.OnClose)\n if windows:\n self.Bind(wx.EVT_BUTTON, self.OnOk, id = wx.ID_OK)\n else:\n self.Bind(wx.EVT_BUTTON, self.OnOk, id = wx.ID_SAVE)\n self.Bind(wx.EVT_BUTTON, self.EditJconf, self.bEditJconf)\n self.Bind(wx.EVT_BUTTON, self.EditCluster, self.bCluster)\n \n ##Set sizers.\n vSizerMain = wx.BoxSizer( wx.VERTICAL )\n vSizer1 = 
wx.BoxSizer( wx.VERTICAL )\n svSizer1 = wx.StaticBoxSizer( self.lblStBox1, wx.VERTICAL )\n svSizer1.Add( self.cbNameServer, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )\n hSizer1 = wx.BoxSizer( wx.HORIZONTAL )\n hSizer1.Add( self.cbConductor, 0, wx.ALIGN_CENTER|wx.ALL, 5 )\n spacer1 = wx.StaticText(self, -1, \" \", wx.DefaultPosition, wx.DefaultSize, 0 )\n hSizer1.Add( spacer1, 0, wx.ALIGN_CENTER, 5 )\n hSizer1.Add( self.cbDesktop, 0, wx.ALIGN_CENTER|wx.ALL, 5 )\n svSizer1.Add( hSizer1, 0, wx.ALIGN_CENTER_VERTICAL, 5 )\n svSizer1.Add( self.cbXplorer, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )\n vSizer1.Add( svSizer1, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.TOP, 5 )\n spacer2 = wx.StaticText(self, -1, \"\", wx.DefaultPosition, [10,10], 0 )\n vSizer1.Add( spacer2, 0, wx.ALIGN_CENTER, 5 )\n svSizer2 = wx.StaticBoxSizer( self.lblStBox2, wx.VERTICAL )\n hSizer2 = wx.BoxSizer( wx.HORIZONTAL )\n hSizer2.Add( self.rbXplorer, 0, wx.ALIGN_CENTER|wx.ALL, 5 )\n hSizer2.Add( self.bCluster, 0, wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT|wx.TOP, 5 )\n svSizer2.Add( hSizer2, 0, wx.ALIGN_CENTER_VERTICAL, 5 )\n hSizer3 = wx.BoxSizer( wx.HORIZONTAL )\n hSizer3.Add( self.chJconf, 0, wx.ALIGN_CENTER|wx.ALL, 5 )\n hSizer3.Add( self.bEditJconf, 0, wx.ALIGN_CENTER|wx.ALL, 5 )\n svSizer2.Add( hSizer3, 0, wx.ALIGN_CENTER, 5 )\n vSizer1.Add( svSizer2, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL, 5 )\n hSizer4 = wx.BoxSizer( wx.HORIZONTAL )\n if windows:\n hSizer4.Add( self.bOk, 0, wx.ALIGN_CENTER|wx.ALL, 5 ) \n hSizer4.Add( self.bCancel, 0, wx.ALIGN_CENTER|wx.LEFT|wx.TOP|wx.BOTTOM, 5 )\n else: \n hSizer4.Add( self.bCancel, 0, wx.ALIGN_CENTER|wx.ALL, 5 ) \n hSizer4.Add( self.bOk, 0, wx.ALIGN_CENTER|wx.LEFT|wx.TOP|wx.BOTTOM, 5 )\n vSizer1.Add( hSizer4, 0, wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL|wx.LEFT|wx.TOP, 5 )\n vSizerMain.Add( vSizer1, 0, wx.ALIGN_CENTER|wx.ALL, 5 ) \n \n vSizerMain.SetSizeHints(self)\n self.SetSizer(vSizerMain)\n #self.CenterOnParent(wx.BOTH)\n ##Set the background color.\n #Style(self)\n if not CLUSTER_ENABLED:\n self.bCluster.Hide()\n ##Set up OK button.\n ##Update Display\n self.React()",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._import_path_var.set(filename)",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._import_path_var.set(filename)",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._import_path_var.set(filename)",
"def on_browse(self, event):\r\n wildcard = \"All files (*.*)|*.*\"\r\n with wx.FileDialog(None, \"Choose a file\",\r\n wildcard=wildcard,\r\n style=wx.ID_OPEN) as dialog:\r\n if dialog.ShowModal() == wx.ID_OK:\r\n self.grin_location.SetValue(dialog.GetPath())",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilenames()\n self._import_path_var.set(filename)",
"def browse(self):\n\n self.filepath.set(fd.askopenfilename(initialdir=self._initaldir,\n filetypes=self._filetypes))",
"def set_spec_file(self):\n self.specfile = select_file(os.getcwd())\n if self.specfile is not None:\n self.spec_file_button.setStyleSheet(\"Text-align:left\")\n self.spec_file_button.setText(self.specfile)\n else:\n self.specfile = None\n self.spec_file_button.setText('')\n if self.is_exp_exists() or self.is_exp_set():\n self.set_experiment()",
"def initProperties(self):\n self.setFoldComments(Preferences.getEditor(\"CssFoldComment\"))\n self.setFoldCompact(Preferences.getEditor(\"AllFoldCompact\"))\n try:\n self.setHSSLanguage(\n Preferences.getEditor(\"CssHssSupport\"))\n self.setLessLanguage(\n Preferences.getEditor(\"CssLessSupport\"))\n self.setSCSSLanguage(\n Preferences.getEditor(\"CssSassySupport\"))\n except AttributeError:\n pass",
"def AutomagicalSettings(self):\n # Try to find gclient or repo root first.\n if not self.options.no_search:\n self.toplevel_root = gclient_utils.FindGclientRoot(self.checkout_root)\n if self.toplevel_root:\n logging.info('Found .gclient at %s' % self.toplevel_root)\n else:\n self.toplevel_root = gclient_utils.FindFileUpwards(\n os.path.join('..', '.repo'), self.checkout_root)\n if self.toplevel_root:\n logging.info('Found .repo dir at %s'\n % os.path.dirname(self.toplevel_root))\n\n # Parse TRYSERVER_* settings from codereview.settings before falling back\n # on setting self.options.root manually further down. Otherwise\n # TRYSERVER_ROOT would never be used in codereview.settings.\n self._GclStyleSettings()\n\n if self.toplevel_root and not self.options.root:\n assert os.path.abspath(self.toplevel_root) == self.toplevel_root\n self.options.root = gclient_utils.PathDifference(self.toplevel_root,\n self.checkout_root)\n else:\n self._GclStyleSettings()",
"def select_app():\n panel = Cocoa.NSOpenPanel.openPanel()\n panel.setCanChooseFiles_(True)\n panel.setCanChooseDirectories_(True)\n panel.setResolvesAliases_(True)\n\n if(panel.runModal() == Cocoa.NSOKButton):\n pathArray = panel.filenames()\n path = pathlib.Path(pathArray[0])\n\n plistPath = path /'Contents'/'Info.plist'\n infoFile = plistPath\n\n try:\n appSize = subprocess.check_output(['du', '-shg', str(path)]).split()[0].decode('utf-8')\n n.views['appSize'].setStringValue_(str(appSize))\n except Exception as err:\n print(err)\n\n n.views['appLocation'].setStringValue_(str(path))\n\n try:\n plist = str(infoFile)\n with open(plist, 'rb') as f:\n info = plistlib.load(f)\n\n if 'CFBundleName' in info:\n global collectedName\n collectedName = info['CFBundleName']\n n.views['appName'].setStringValue_(collectedName)\n else:\n n.views['appName'].setStringValue_('')\n\n if 'CFBundleShortVersionString' in info:\n global collectedVersion\n collectedVersion= info['CFBundleShortVersionString']\n n.views['appVersion'].setStringValue_(collectedVersion)\n else:\n n.views['appVersion'].setStringValue_('')\n\n if 'CFBundleIconFile' in info:\n global collectedIcon\n collectedIcon = pathlib.Path(plist).parent / 'Resources' / info['CFBundleIconFile']\n n.views['appIcon'].setStringValue_(str(collectedIcon))\n else:\n n.views['appIcon'].setStringValue_('')\n\n if 'CFBundleIdentifier' in info:\n global collectedIdentifier\n collectedIdentifier = info['CFBundleIdentifier']\n n.views['appIdentifier'].setStringValue_(collectedIdentifier)\n else:\n n.views['appIdentifier'].setStringValue_('')\n\n except Exception as err:\n print('An Error Occured: {0}'.format(err))",
"def FileDialog( message, wildcard, style, defaultDir=os.getcwd(), defaultFile='' ):\n dlg = wx.FileDialog( wx.GetApp().GetTopWindow(), message, defaultDir, defaultFile, wildcard, style )\n if dlg.ShowModal() == wx.ID_OK:\n if style & wx.MULTIPLE:\n result = dlg.GetPaths()\n else:\n result = dlg.GetPath()\n else:\n result = False\n dlg.Destroy()\n \n return result",
"def __init__(\n self,\n title:str=\"Universal File Dialog\",\n icon:str=\"\",\n show_hidden:bool=False,\n include_files:bool=True,\n multiselect:bool=True,\n select_dirs:bool=True,\n select_files:bool=True,\n unix_delimiter:bool=True,\n stdout:bool=False\n ):\n\n if not isinstance(title, str):\n raise TypeError(\"Argument title must be type string.\")\n\n self.title = title\n\n if icon:\n if not isinstance(icon, str):\n raise TypeError(\"Argument icon must be type string.\")\n\n if not isfile(icon):\n raise FileNotFoundError(f\"File not found: {icon}\")\n\n self.icon = icon\n\n else: \n self.icon = \"\"\n\n if show_hidden:\n self.show_hidden = True\n else:\n self.show_hidden = False\n\n if include_files:\n self.include_files = True\n else:\n self.include_files = False\n\n if multiselect:\n self.multiselect = True\n else:\n self.multiselect = False\n\n if select_dirs:\n self.select_dirs = True\n else:\n self.select_dirs = False\n\n if select_files:\n self.select_files = True\n else:\n self.select_files = False\n\n if unix_delimiter:\n self.unix_delimiter = True\n else:\n self.unix_delimiter = False\n\n if stdout:\n self.stdout = True\n else:\n self.stdout = False\n\n # Tkinter:\n self.dialog = Tk()\n self.dialog.withdraw()\n self.dialog.title(self.title)\n self.dialog.minsize(width=300, height=200)\n self.dialog.geometry(\"500x300\")\n self.dialog.update_idletasks()\n\n self.file_icon=PhotoImage(\n file=f\"{dirname(__file__)}/file.gif\",\n master=self.dialog\n ).subsample(50)\n\n self.folder_icon=PhotoImage(\n file=f\"{dirname(__file__)}/folder.gif\",\n master=self.dialog\n ).subsample(15)\n \n self.disk_icon=PhotoImage(\n file=f\"{dirname(__file__)}/disk.gif\",\n master=self.dialog\n ).subsample(15)\n\n if self.icon:\n self.dialog.iconbitmap(self.icon)\n else:\n self.dialog.iconbitmap(f\"{dirname(__file__)}/icon.ico\")\n \n # Widgets:\n self.paneview = PanedWindow(\n self.dialog,\n sashwidth=7,\n bg=\"#cccccc\",\n bd=0,\n )\n\n self.left_pane = PanedWindow(self.paneview)\n self.right_pane = PanedWindow(self.paneview)\n self.paneview.add(self.left_pane)\n self.paneview.add(self.right_pane)\n\n self.treeview_x_scrollbar=Scrollbar(self.left_pane, orient=\"horizontal\")\n self.treeview_y_scrollbar=Scrollbar(self.left_pane, orient=\"vertical\")\n self.list_box_x_scrollbar=Scrollbar(self.right_pane, orient=\"horizontal\")\n self.list_box_y_scrollbar=Scrollbar(self.right_pane, orient=\"vertical\")\n \n # tstyle = Style().configure(\".\", )\n\n self.treeview=Treeview(\n self.left_pane,\n xscrollcommand=self.treeview_x_scrollbar.set,\n yscrollcommand=self.treeview_y_scrollbar.set,\n show=\"tree\",\n selectmode=\"browse\",\n # style=tstyle\n )\n\n\n self.list_box=Listbox(\n self.right_pane,\n xscrollcommand=self.list_box_x_scrollbar.set,\n yscrollcommand=self.list_box_y_scrollbar.set,\n width=34,\n highlightthickness=0,\n bd=2,\n relief=\"ridge\"\n )\n\n if self.multiselect:\n self.list_box.config(selectmode=\"extended\")\n else:\n self.list_box.config(selectmode=\"browse\")\n\n self.cancel_button = Button(\n self.left_pane,\n text=\"Cancel\",\n command=self.cancel\n )\n\n self.submit_button = Button(\n self.right_pane,\n text=\"Submit\",\n command=self.submit\n )\n\n self.treeview_x_scrollbar.config(command=self.treeview.xview)\n self.treeview_y_scrollbar.config(command=self.treeview.yview)\n self.list_box_x_scrollbar.config(command=self.list_box.xview)\n self.list_box_y_scrollbar.config(command=self.list_box.yview)\n \n #Layout:\n self.dialog.rowconfigure(0, weight=1)\n 
self.dialog.columnconfigure(0, weight=1)\n\n self.left_pane.grid_rowconfigure(0, weight=1)\n self.left_pane.grid_columnconfigure(0, weight=1)\n self.right_pane.grid_rowconfigure(0, weight=1)\n self.right_pane.grid_columnconfigure(0, weight=1)\n\n self.paneview.paneconfigure(\n self.left_pane,\n minsize=100,\n #Start off w/ the sash centered in the GUI:\n width=(self.dialog.winfo_width() / 2) - \n ceil((self.paneview.cget(\"sashwidth\") * 1.5)),\n )\n self.paneview.paneconfigure(self.right_pane, minsize=100)\n\n self.paneview.grid(\n row=0,\n column=0,\n sticky=\"nsew\"\n )\n\n self.treeview.grid(\n row=0,\n column=0,\n sticky=\"nsew\"\n )\n self.treeview_y_scrollbar.grid(\n row=0,\n column=1,\n sticky=\"ns\"\n )\n self.treeview_x_scrollbar.grid(\n row=1,\n column=0,\n columnspan=2,\n sticky=\"ew\"\n )\n\n self.list_box.grid(\n row=0,\n column=0,\n sticky=\"nsew\"\n )\n self.list_box_y_scrollbar.grid(\n row=0,\n column=1,\n sticky=\"ns\"\n )\n self.list_box_x_scrollbar.grid(\n row=1,\n column=0,\n columnspan=2,\n sticky=\"ew\"\n )\n\n self.cancel_button.grid(\n row=2,\n column=0,\n sticky=\"w\",\n padx=10, \n pady=10\n )\n self.submit_button.grid(\n row=2,\n column=0,\n columnspan=2,\n sticky=\"e\",\n padx=10,\n pady=10\n )\n \n #Bindings, Protocols, & Misc:\n self.dialog.bind(\"<Control-w>\", self.cancel)\n self.treeview.bind(\"<<TreeviewSelect>>\", self.treeview_select)\n self.treeview.bind(\"<Double-Button-1>\", self.dialog_populate)\n self.treeview.bind(\"<Return>\", self.dialog_populate)\n self.treeview.bind(\"<Right>\", self.dialog_populate)\n self.list_box.bind(\"<<ListboxSelect>>\", self.list_box_select)\n self.list_box.bind(\"<Return>\", self.submit)\n self.dialog.protocol(\"WM_DELETE_WINDOW\", self.cancel)\n\n self.dialog_selection = deque()\n self.selection_paths = deque()\n\n for disk in self.get_disks():\n self.treeview.insert(\n \"\",\n index=\"end\",\n text=disk,\n image=self.disk_icon,\n )\n\n self.dialog.focus()",
"def test_default_path(self):\n options = ControlOptions()\n options.parseOptions([])\n self.assertEqual(options[\"data-path\"], FilePath(b\"/var/lib/flocker\"))",
"def __call__(self):\n\n (width_offset, height_offset)=self.get_offset(self.dialog)\n self.dialog.geometry(f\"+{width_offset}+{height_offset}\")\n self.dialog.update_idletasks()\n self.dialog.deiconify()\n\n self.dialog.wait_window()\n\n for i, path in enumerate(self.dialog_selection):\n if self.unix_delimiter:\n self.dialog_selection[i] = sub(\"\\\\\\\\\", \"/\", path)\n else:\n self.dialog_selection[i] = sub(\"/\", \"\\\\\\\\\", path)\n\n\n if self.stdout:\n [print(item) for item in self.dialog_selection]\n\n return list(self.dialog_selection)",
"def askopenfilename():\n\n file_opt = options = {}\n options['defaultextension'] = '.*'\n options['initialdir'] = 'User\\\\'\n options['initialfile'] = ''\n options['parent'] = root\n options['title'] = 'choose file'\n options['multiple'] = 1\n\n # get filename\n filename = tk.filedialog.askopenfilename(**file_opt)\n\n if filename:\n self.sourcefile = filename\n if len(filename) is 1:\n file_path_var.set(filename)\n else:\n file_path_var.set(\n \"Multiple files, including {}\".format(filename[0]))",
"def __init__(self, dialog_title='', dialog_format='',\n start_dir=os.path.expanduser('~/'),\n icon_size=(12, 20), minimal_width=200,\n browse_label='Browse', on_open=None,\n reload_button=True, reload_label='Reload',\n recent_files=None, directory_aliases=None,\n allow_empty=True, empty_file_label='(none)'):\n super().__init__()\n self.dialog_title = dialog_title\n self.dialog_format = dialog_format\n self.start_dir = start_dir\n\n # Recent files should also contain `empty_file_label` so\n # when (none) is selected this is stored in settings.\n self.recent_files = recent_files if recent_files is not None else []\n self.directory_aliases = directory_aliases or {}\n self.allow_empty = allow_empty\n self.file_combo = None\n self.empty_file_label = empty_file_label\n if self.empty_file_label not in self.recent_files \\\n and (self.allow_empty or not self.recent_files):\n self.recent_files.append(self.empty_file_label)\n\n self.check_existence()\n self.on_open.connect(on_open)\n\n layout = QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n\n if recent_files is not None:\n self.file_combo = QComboBox()\n self.file_combo.setMinimumWidth(minimal_width)\n self.file_combo.activated[int].connect(self.select)\n self.update_combo()\n layout.addWidget(self.file_combo)\n\n self.browse_button = QPushButton(browse_label)\n self.browse_button.setFocusPolicy(Qt.NoFocus)\n self.browse_button.clicked.connect(self.browse)\n self.browse_button.setIcon(self.style()\n .standardIcon(QStyle.SP_DirOpenIcon))\n self.browse_button.setIconSize(QSize(*icon_size))\n self.browse_button.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)\n layout.addWidget(self.browse_button)\n\n if reload_button:\n self.reload_button = QPushButton(reload_label)\n self.reload_button.setFocusPolicy(Qt.NoFocus)\n self.reload_button.clicked.connect(self.reload)\n self.reload_button.setIcon(self.style()\n .standardIcon(QStyle.SP_BrowserReload))\n self.reload_button.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)\n self.reload_button.setIconSize(QSize(*icon_size))\n layout.addWidget(self.reload_button)",
"def askOpenFileName(parent, title, wc, remember =- 1, filetype = None):\n\tasklist = []\n\tif remember == -1:\n\t\tconf = Configuration.getConfiguration()\n\t\tremember = conf.getConfigItem(\"RememberPath\", \"Paths\")\n\tlastpath = \"\"\n\tftype = wc.split(\"|\")[1]\n\tftype = ftype.split(\".\")[1]\n\tif filetype != None:\n\t\tftype = filetype\n\tif remember:\n\t\tlastpath = conf.getConfigItem(\"LastPath_%s\" % ftype, \"Paths\")\n\t\tif not lastpath:\n\t\t\tlastpath = \".\"\n\tdlg = wx.FileDialog(parent, title, lastpath, wildcard = wc, style = wx.OPEN|wx.MULTIPLE)\n\tif dlg.ShowModal() == wx.ID_OK:\n\t\tasklist = dlg.GetPaths()\n\t\tasklist = map(unicode, asklist)\n\t\tif not asklist:\n\t\t\treturn asklist\n\t\tif remember:\n\t\t\tfilepath = os.path.dirname(asklist[0])\n\t\t\tconf.setConfigItem(\"LastPath_%s\" % ftype, \"Paths\", filepath)\n\t\t\n\tdlg.Destroy() \n\treturn asklist",
"def configure(prompt_list):\n darwin_vers = int(os.uname()[2].split('.')[0])\n edited_prefs = {}\n for (key, prompt) in prompt_list:\n newvalue = get_input_with_default('%15s: ' % prompt, pref(key))\n if darwin_vers == 10:\n # old behavior in SL: hitting return gives you an empty string,\n # and means accept the default value.\n edited_prefs[key] = newvalue or pref(key) or ''\n else:\n # just use the edited value as-is\n edited_prefs[key] = newvalue\n\n if FOUNDATION_SUPPORT:\n for key, value in edited_prefs.items():\n try:\n CFPreferencesSetAppValue(key, value, BUNDLE_ID)\n except BaseException:\n print('Could not save configuration!', file=sys.stderr)\n raise ConfigurationSaveError\n # remove repo_path if it exists since we don't use that\n # any longer (except for backwards compatibility) and we don't\n # want it getting out of sync with the repo_url\n CFPreferencesSetAppValue('repo_path', None, BUNDLE_ID)\n CFPreferencesAppSynchronize(BUNDLE_ID)\n\n else:\n try:\n existing_prefs = readPlist(PREFSPATH)\n existing_prefs.update(edited_prefs)\n # remove repo_path if it exists since we don't use that\n # any longer (except for backwards compatibility) and we don't\n # want it getting out of sync with the repo_url\n if 'repo_path' in existing_prefs:\n del existing_prefs['repo_path']\n writePlist(existing_prefs, PREFSPATH)\n except (IOError, OSError, ExpatError):\n print('Could not save configuration to %s' % PREFSPATH,\n file=sys.stderr)\n raise ConfigurationSaveError"
] | [
"0.59659076",
"0.52273905",
"0.52243555",
"0.5196339",
"0.51798254",
"0.51786554",
"0.51282394",
"0.5127446",
"0.51222116",
"0.51085234",
"0.5083295",
"0.50667393",
"0.5033301",
"0.5033301",
"0.5033301",
"0.5008447",
"0.5006361",
"0.4993501",
"0.49912578",
"0.49555835",
"0.49392104",
"0.49265411",
"0.49237505",
"0.49206007",
"0.49100357",
"0.48970252",
"0.48811245",
"0.48734605",
"0.4852869",
"0.4848985"
] | 0.60916245 | 0 |
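A note on the contract shared by the dialog helpers in the record above: each returns the selected path, a list of paths for multi-select, or False on cancel. Below is a minimal, toolkit-agnostic sketch of that same contract using the standard-library tkinter.filedialog; the helper name and its defaults are illustrative assumptions, not taken from any record.

import tkinter as tk
from tkinter import filedialog

def ask_paths(title="Open", multiple=False, patterns=(("All files", "*.*"),)):
    # A hidden root window lets the dialog run standalone.
    root = tk.Tk()
    root.withdraw()
    try:
        if multiple:
            chosen = filedialog.askopenfilenames(title=title, filetypes=patterns)
            return list(chosen) if chosen else False
        chosen = filedialog.askopenfilename(title=title, filetypes=patterns)
        return chosen or False
    finally:
        root.destroy()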
sets up System Properties for FileDialog() to behave as required >> Mac only | def setFileDialogParameters(lReportOnly=False, lDefaults=False, lSelectDirectories=None, lPackagesT=None):
myPrint("D", "In ", inspect.currentframe().f_code.co_name, "()")
if not Platform.isOSX(): return
_TRUE = "true"
_FALSE = "false"
_DIRS_FD = "apple.awt.fileDialogForDirectories" # When True you can select a Folder (rather than a file)
_PKGS_FD = "apple.awt.use-file-dialog-packages" # When True allows you to select a 'bundle' as a file; False means navigate inside the bundle
# "com.apple.macos.use-file-dialog-packages" # DEPRECATED since Monterrey - discovered this about MD2022.5(4090) - refer: java.desktop/sun/lwawt/macosx/CFileDialog.java
# FileDialog defaults
# "apple.awt.fileDialogForDirectories" default "false" >> set "true" to allow Directories to be selected
# "apple.awt.use-file-dialog-packages" default "true" >> set "false" to allow access to Mac 'packages'
if debug or lReportOnly:
myPrint("B", "Parameters set: ReportOnly: %s, Defaults:%s, SelectDirectories:%s, PackagesT:%s" % (lReportOnly, lDefaults, lSelectDirectories, lPackagesT))
txt = ("Before setting" if not lReportOnly else "Reporting only")
for setting in [_DIRS_FD, _PKGS_FD]: myPrint("DB", "%s: '%s': '%s'" %(pad(txt,14), pad(setting,50), System.getProperty(setting)))
if lReportOnly: return
if lDefaults:
System.setProperty(_DIRS_FD,_FALSE)
System.setProperty(_PKGS_FD,_TRUE)
else:
if lSelectDirectories is not None: System.setProperty(_DIRS_FD, (_TRUE if lSelectDirectories else _FALSE))
if lPackagesT is not None: System.setProperty(_PKGS_FD, (_TRUE if lPackagesT else _FALSE))
for setting in [_DIRS_FD, _PKGS_FD]: myPrint("DB", "After setting: '%s': '%s'" %(pad(setting,50), System.getProperty(setting)))
return | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _startup_system(self):\n\n self._config_path.set(filedialog.asksaveasfilename())\n self._system = System(self._config_path.get())\n\n self._start_frame.pack_forget()\n self._main_frame.pack()",
"def pkg_app_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._pkg_app_path_var.set(filename)",
"def persist_macos(self) -> None:",
"def user_safety_config():\n\n\tprint_section_header(\"USER SAFETY\", Fore.BLUE)\n\n\tif prompt_yes_no(top_line=\"-> Lock Mac as soon as screen saver starts?\",\n\t bottom_line=\"If your screen is black or on screensaver mode, you'll be prompted for a password to login every time.\"):\n\t\tprint_confirmation(\"Configuring account lock on screensaver...\")\n\t\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPassword', '-int', '1'], stdout=sp.PIPE)\n\t\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPasswordDelay', '-int', '0'], stdout=sp.PIPE)\n\n\tif prompt_yes_no(top_line=\"-> Display all file extensions?\",\n\t bottom_line=\"This prevents malware from disguising itself as another file type.\"):\n\t\tprint_confirmation(\"Configuring display of all file extensions...\")\n\t\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'AppleShowAllExtensions', '-bool', 'true'], stdout=sp.PIPE)\n\n\tif prompt_yes_no(top_line=\"-> Disable saving to the cloud by default?\",\n\t bottom_line=\"This prevents sensitive documents from being unintentionally stored on the cloud.\"):\n\t\tprint_confirmation(\"Disabling cloud saving by default...\")\n\t\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'NSDocumentSaveNewDocumentsToCloud', '-bool', 'false'], stdout=sp.PIPE)\n\n\tif prompt_yes_no(top_line=\"-> Show hidden files in Finder?\",\n\t bottom_line=\"This lets you see all files on the system without having to use the terminal.\"):\n\t\tprint_confirmation(\"Displaying hidden files in Finder...\")\n\t\tsp.run(['defaults', 'write', 'com.apple.finder', 'AppleShowAllFiles', '-boolean', 'true'], shell=True, stdout=sp.PIPE)\n\n\t# Reset finder after messing with it.\n\tprint_confirmation(\"Resetting Finder to finalize changes...\")\n\tsp.run(['killAll', 'Finder'], stdout=sp.PIPE)",
"def fileDialog(*args, application: bool=True, defaultFileName: AnyStr=\"\", directoryMask:\n AnyStr=\"\", mode: int=0, title: AnyStr=\"\", **kwargs)->AnyStr:\n pass",
"def set_spec_file(self):\n self.specfile = select_file(os.getcwd())\n if self.specfile is not None:\n self.spec_file_button.setStyleSheet(\"Text-align:left\")\n self.spec_file_button.setText(self.specfile)\n else:\n self.specfile = None\n self.spec_file_button.setText('')\n if self.is_exp_exists() or self.is_exp_set():\n self.set_experiment()",
"def input_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._input_path_var.set(filename)",
"def __init__(\n self,\n title:str=\"Universal File Dialog\",\n icon:str=\"\",\n show_hidden:bool=False,\n include_files:bool=True,\n multiselect:bool=True,\n select_dirs:bool=True,\n select_files:bool=True,\n unix_delimiter:bool=True,\n stdout:bool=False\n ):\n\n if not isinstance(title, str):\n raise TypeError(\"Argument title must be type string.\")\n\n self.title = title\n\n if icon:\n if not isinstance(icon, str):\n raise TypeError(\"Argument icon must be type string.\")\n\n if not isfile(icon):\n raise FileNotFoundError(f\"File not found: {icon}\")\n\n self.icon = icon\n\n else: \n self.icon = \"\"\n\n if show_hidden:\n self.show_hidden = True\n else:\n self.show_hidden = False\n\n if include_files:\n self.include_files = True\n else:\n self.include_files = False\n\n if multiselect:\n self.multiselect = True\n else:\n self.multiselect = False\n\n if select_dirs:\n self.select_dirs = True\n else:\n self.select_dirs = False\n\n if select_files:\n self.select_files = True\n else:\n self.select_files = False\n\n if unix_delimiter:\n self.unix_delimiter = True\n else:\n self.unix_delimiter = False\n\n if stdout:\n self.stdout = True\n else:\n self.stdout = False\n\n # Tkinter:\n self.dialog = Tk()\n self.dialog.withdraw()\n self.dialog.title(self.title)\n self.dialog.minsize(width=300, height=200)\n self.dialog.geometry(\"500x300\")\n self.dialog.update_idletasks()\n\n self.file_icon=PhotoImage(\n file=f\"{dirname(__file__)}/file.gif\",\n master=self.dialog\n ).subsample(50)\n\n self.folder_icon=PhotoImage(\n file=f\"{dirname(__file__)}/folder.gif\",\n master=self.dialog\n ).subsample(15)\n \n self.disk_icon=PhotoImage(\n file=f\"{dirname(__file__)}/disk.gif\",\n master=self.dialog\n ).subsample(15)\n\n if self.icon:\n self.dialog.iconbitmap(self.icon)\n else:\n self.dialog.iconbitmap(f\"{dirname(__file__)}/icon.ico\")\n \n # Widgets:\n self.paneview = PanedWindow(\n self.dialog,\n sashwidth=7,\n bg=\"#cccccc\",\n bd=0,\n )\n\n self.left_pane = PanedWindow(self.paneview)\n self.right_pane = PanedWindow(self.paneview)\n self.paneview.add(self.left_pane)\n self.paneview.add(self.right_pane)\n\n self.treeview_x_scrollbar=Scrollbar(self.left_pane, orient=\"horizontal\")\n self.treeview_y_scrollbar=Scrollbar(self.left_pane, orient=\"vertical\")\n self.list_box_x_scrollbar=Scrollbar(self.right_pane, orient=\"horizontal\")\n self.list_box_y_scrollbar=Scrollbar(self.right_pane, orient=\"vertical\")\n \n # tstyle = Style().configure(\".\", )\n\n self.treeview=Treeview(\n self.left_pane,\n xscrollcommand=self.treeview_x_scrollbar.set,\n yscrollcommand=self.treeview_y_scrollbar.set,\n show=\"tree\",\n selectmode=\"browse\",\n # style=tstyle\n )\n\n\n self.list_box=Listbox(\n self.right_pane,\n xscrollcommand=self.list_box_x_scrollbar.set,\n yscrollcommand=self.list_box_y_scrollbar.set,\n width=34,\n highlightthickness=0,\n bd=2,\n relief=\"ridge\"\n )\n\n if self.multiselect:\n self.list_box.config(selectmode=\"extended\")\n else:\n self.list_box.config(selectmode=\"browse\")\n\n self.cancel_button = Button(\n self.left_pane,\n text=\"Cancel\",\n command=self.cancel\n )\n\n self.submit_button = Button(\n self.right_pane,\n text=\"Submit\",\n command=self.submit\n )\n\n self.treeview_x_scrollbar.config(command=self.treeview.xview)\n self.treeview_y_scrollbar.config(command=self.treeview.yview)\n self.list_box_x_scrollbar.config(command=self.list_box.xview)\n self.list_box_y_scrollbar.config(command=self.list_box.yview)\n \n #Layout:\n self.dialog.rowconfigure(0, weight=1)\n 
self.dialog.columnconfigure(0, weight=1)\n\n self.left_pane.grid_rowconfigure(0, weight=1)\n self.left_pane.grid_columnconfigure(0, weight=1)\n self.right_pane.grid_rowconfigure(0, weight=1)\n self.right_pane.grid_columnconfigure(0, weight=1)\n\n self.paneview.paneconfigure(\n self.left_pane,\n minsize=100,\n #Start off w/ the sash centered in the GUI:\n width=(self.dialog.winfo_width() / 2) - \n ceil((self.paneview.cget(\"sashwidth\") * 1.5)),\n )\n self.paneview.paneconfigure(self.right_pane, minsize=100)\n\n self.paneview.grid(\n row=0,\n column=0,\n sticky=\"nsew\"\n )\n\n self.treeview.grid(\n row=0,\n column=0,\n sticky=\"nsew\"\n )\n self.treeview_y_scrollbar.grid(\n row=0,\n column=1,\n sticky=\"ns\"\n )\n self.treeview_x_scrollbar.grid(\n row=1,\n column=0,\n columnspan=2,\n sticky=\"ew\"\n )\n\n self.list_box.grid(\n row=0,\n column=0,\n sticky=\"nsew\"\n )\n self.list_box_y_scrollbar.grid(\n row=0,\n column=1,\n sticky=\"ns\"\n )\n self.list_box_x_scrollbar.grid(\n row=1,\n column=0,\n columnspan=2,\n sticky=\"ew\"\n )\n\n self.cancel_button.grid(\n row=2,\n column=0,\n sticky=\"w\",\n padx=10, \n pady=10\n )\n self.submit_button.grid(\n row=2,\n column=0,\n columnspan=2,\n sticky=\"e\",\n padx=10,\n pady=10\n )\n \n #Bindings, Protocols, & Misc:\n self.dialog.bind(\"<Control-w>\", self.cancel)\n self.treeview.bind(\"<<TreeviewSelect>>\", self.treeview_select)\n self.treeview.bind(\"<Double-Button-1>\", self.dialog_populate)\n self.treeview.bind(\"<Return>\", self.dialog_populate)\n self.treeview.bind(\"<Right>\", self.dialog_populate)\n self.list_box.bind(\"<<ListboxSelect>>\", self.list_box_select)\n self.list_box.bind(\"<Return>\", self.submit)\n self.dialog.protocol(\"WM_DELETE_WINDOW\", self.cancel)\n\n self.dialog_selection = deque()\n self.selection_paths = deque()\n\n for disk in self.get_disks():\n self.treeview.insert(\n \"\",\n index=\"end\",\n text=disk,\n image=self.disk_icon,\n )\n\n self.dialog.focus()",
"def system_properties(self):\r\n return dict(self._get_system_properties(self.java))",
"def readInConfigFileDlg( self ):\n pass",
"def saveInConfigFileDlg( self ):\n pass",
"def init_conf_windows(settings={}):\n if os.name == 'nt':\n original_settings = conf.settings\n conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS)\n conf.settings.installation_id = conf.settings.get_installation_id()\n conf.settings.update(settings)",
"def onLoad (self):\n #productive #onButton\n profprint()\n self.fileDialog = qt.QFileDialog(self.parent)\n self.fileDialog.setDirectory(slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\",\"Config\"))\n self.fileDialog.options = self.fileDialog.DontUseNativeDialog\n self.fileDialog.acceptMode = self.fileDialog.AcceptOpen\n self.fileDialog.defaultSuffix = \"cfg\"\n self.fileDialog.setNameFilter(\"Configuration File (*.cfg)\")\n self.fileDialog.connect(\"fileSelected(QString)\", self.onLoadFileSelected)\n self.fileDialog.show()",
"def checkOS():\n\tglobal fileSeperator\n\tif sys.platform=='win32':\n\t\tprint \"System identified as Windows.\"\n\t\tfileSeperator = \"\\\\\"\n\telse:\n\t\tprint \"System identified as MacOSX.\"\n\t\tfileSeperator = \"/\"",
"def onLoad (self):\r\n # productive #onButton\r\n profprint()\r\n self.dirDialog = qt.QFileDialog(self.parent)\r\n self.dirDialog.setDirectory(slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\", \"Config\"))\r\n self.dirDialog.options = self.dirDialog.DontUseNativeDialog\r\n self.dirDialog.acceptMode = self.dirDialog.AcceptOpen\r\n self.dirDialog.defaultSuffix = \"cfg\"\r\n self.dirDialog.setNameFilter(\"Configuration File (*.cfg)\")\r\n self.dirDialog.connect(\"fileSelected(QString)\", self.onLoadFileSelected)\r\n self.dirDialog.show()",
"def select_app():\n panel = Cocoa.NSOpenPanel.openPanel()\n panel.setCanChooseFiles_(True)\n panel.setCanChooseDirectories_(True)\n panel.setResolvesAliases_(True)\n\n if(panel.runModal() == Cocoa.NSOKButton):\n pathArray = panel.filenames()\n path = pathlib.Path(pathArray[0])\n\n plistPath = path /'Contents'/'Info.plist'\n infoFile = plistPath\n\n try:\n appSize = subprocess.check_output(['du', '-shg', str(path)]).split()[0].decode('utf-8')\n n.views['appSize'].setStringValue_(str(appSize))\n except Exception as err:\n print(err)\n\n n.views['appLocation'].setStringValue_(str(path))\n\n try:\n plist = str(infoFile)\n with open(plist, 'rb') as f:\n info = plistlib.load(f)\n\n if 'CFBundleName' in info:\n global collectedName\n collectedName = info['CFBundleName']\n n.views['appName'].setStringValue_(collectedName)\n else:\n n.views['appName'].setStringValue_('')\n\n if 'CFBundleShortVersionString' in info:\n global collectedVersion\n collectedVersion= info['CFBundleShortVersionString']\n n.views['appVersion'].setStringValue_(collectedVersion)\n else:\n n.views['appVersion'].setStringValue_('')\n\n if 'CFBundleIconFile' in info:\n global collectedIcon\n collectedIcon = pathlib.Path(plist).parent / 'Resources' / info['CFBundleIconFile']\n n.views['appIcon'].setStringValue_(str(collectedIcon))\n else:\n n.views['appIcon'].setStringValue_('')\n\n if 'CFBundleIdentifier' in info:\n global collectedIdentifier\n collectedIdentifier = info['CFBundleIdentifier']\n n.views['appIdentifier'].setStringValue_(collectedIdentifier)\n else:\n n.views['appIdentifier'].setStringValue_('')\n\n except Exception as err:\n print('An Error Occured: {0}'.format(err))",
"def askopenfilename(self, *args, **kw):\n\n self.tk.tk_setPalette('#888888')\n save_update_step = self.update_step\n self.update_step = 0\n\n filename = tkinter.filedialog.askopenfilename(parent=self.tk)\n if filename:\n self.readwtf(filename)\n self.redraw_letters()\n self.update_step = save_update_step\n self.tk.tk_setPalette('#000000')",
"def configure(prompt_list):\n darwin_vers = int(os.uname()[2].split('.')[0])\n edited_prefs = {}\n for (key, prompt) in prompt_list:\n newvalue = get_input_with_default('%15s: ' % prompt, pref(key))\n if darwin_vers == 10:\n # old behavior in SL: hitting return gives you an empty string,\n # and means accept the default value.\n edited_prefs[key] = newvalue or pref(key) or ''\n else:\n # just use the edited value as-is\n edited_prefs[key] = newvalue\n\n if FOUNDATION_SUPPORT:\n for key, value in edited_prefs.items():\n try:\n CFPreferencesSetAppValue(key, value, BUNDLE_ID)\n except BaseException:\n print('Could not save configuration!', file=sys.stderr)\n raise ConfigurationSaveError\n # remove repo_path if it exists since we don't use that\n # any longer (except for backwards compatibility) and we don't\n # want it getting out of sync with the repo_url\n CFPreferencesSetAppValue('repo_path', None, BUNDLE_ID)\n CFPreferencesAppSynchronize(BUNDLE_ID)\n\n else:\n try:\n existing_prefs = readPlist(PREFSPATH)\n existing_prefs.update(edited_prefs)\n # remove repo_path if it exists since we don't use that\n # any longer (except for backwards compatibility) and we don't\n # want it getting out of sync with the repo_url\n if 'repo_path' in existing_prefs:\n del existing_prefs['repo_path']\n writePlist(existing_prefs, PREFSPATH)\n except (IOError, OSError, ExpatError):\n print('Could not save configuration to %s' % PREFSPATH,\n file=sys.stderr)\n raise ConfigurationSaveError",
"def __call__(self):\n\n (width_offset, height_offset)=self.get_offset(self.dialog)\n self.dialog.geometry(f\"+{width_offset}+{height_offset}\")\n self.dialog.update_idletasks()\n self.dialog.deiconify()\n\n self.dialog.wait_window()\n\n for i, path in enumerate(self.dialog_selection):\n if self.unix_delimiter:\n self.dialog_selection[i] = sub(\"\\\\\\\\\", \"/\", path)\n else:\n self.dialog_selection[i] = sub(\"/\", \"\\\\\\\\\", path)\n\n\n if self.stdout:\n [print(item) for item in self.dialog_selection]\n\n return list(self.dialog_selection)",
"def action(self):\n self.filename = self.ui_SelectedName.text()\n if self.filename == \"\" or self.filename is None:\n return\n\n dirname = fs.path.forcedir(\".\")\n if self.wparm is not None:\n dirname = self.selected_dir\n if dirname.startswith(self.active_url):\n filename = \"{}{}\".format(fs.path.forcedir(self.active_url), self.filename)\n else:\n # We can't use fs.path.join and also not fs.path.abspath because of protocol url\n filename = \"{}{}{}\".format(\n fs.path.forcedir(self.active_url),\n fs.path.forcedir(dirname),\n self.filename,\n )\n filename = filename.replace(fs.path.forcedir(\".\"), \"\")\n if self.show_save_action and not self.show_dirs_only:\n self.save_settings()\n self.filename = self.ui_SelectedName.text()\n if self.filename == \"\":\n return\n info = self.get_info(fs.path.split(filename)[1], namespaces=None)\n if info is not None and info.is_dir:\n sel = QtWidgets.QMessageBox.warning(\n self,\n \"Warning\",\n \"You can't create a file with this name: {0}\".format(self.filename),\n QtWidgets.QMessageBox.No,\n )\n elif info is not None and info.is_file:\n sel = QtWidgets.QMessageBox.question(\n self,\n \"Replace Filename\",\n \"This will replace the filename: {0}. Continue?\".format(\n self.filename\n ),\n QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,\n )\n if sel == QtWidgets.QMessageBox.Yes:\n self.filename = filename\n self.close()\n else:\n pass\n else:\n self.filename = filename\n self.close()\n else:\n self.filename = filename\n self.close()",
"def on_open_file(self):\n return tkFileDialog.askopenfilename(\n filetypes=[('default', '*.txt'), ('All files', '*.*')])",
"def askOpen(parent,title='',defaultDir='',defaultFile='',wildcard='',style=wx.OPEN):\r\n defaultDir,defaultFile = [GPath(x).s for x in (defaultDir,defaultFile)]\r\n dialog = wx.FileDialog(parent,title,defaultDir,defaultFile,wildcard, style )\r\n if dialog.ShowModal() != wx.ID_OK: \r\n result = False\r\n elif style & wx.MULTIPLE:\r\n result = map(GPath,dialog.GetPaths())\r\n else:\r\n result = GPath(dialog.GetPath())\r\n dialog.Destroy()\r\n return result",
"def showpreferencefiles():\n process = subprocess.check_output(['ls', os.path.dirname(os.path.abspath(__file__))+'/Preferences'])\n print()\n for eachFile in process.decode('utf-8').split('\\n'):\n if '__' not in eachFile and '~' not in eachFile:\n print(' {}'.format(eachFile))",
"def mv_properties(self):\n f = '/coretemp/coretemp.properties'\n b = os.getcwd()\n shutil.copy2(b+f, '/etc/')",
"def browse(self):\n\n self.filepath.set(fd.askopenfilename(initialdir=self._initaldir,\n filetypes=self._filetypes))",
"def set(self):\n \n ffmpeg_installed = misc.askquestion(DialogTitle='FFMPEG Check',\n Question='Is FFMPEG installed?')\n \n if ffmpeg_installed:\n ffmpeg_dir = misc.get_dir(DialogTitle='Please select the directory where FFMPEG (binary) is installed:')\n \n if sys.platform=='win32':\n self.ffmpeg = os.path.join(ffmpeg_dir, 'ffmpeg.exe')\n self.ffplay = os.path.join(ffmpeg_dir, 'ffplay.exe')\n else:\n self.ffmpeg = os.path.join(ffmpeg_dir, 'ffmpeg')\n self.ffplay = os.path.join(ffmpeg_dir, 'ffplay')\n \n if not os.path.exists(self.ffmpeg):\n print('Sorry, {0} does not exist!'.format(self.ffmpeg))\n return\n \n if not os.path.exists(self.ffplay):\n print('Sorry, {0} does not exist!'.format(self.ffplay))\n return\n \n else:\n self.ffmpeg = None\n self.ffplay = None\n \n # Save them to the default config file\n info = {'ffmpeg':self.ffmpeg, 'ffplay': self.ffplay}\n try:\n with open(self.config_file, 'w') as outFile:\n json.dump(info, outFile)\n print('Config information written to {0}'.format(os.path.abspath(self.config_file)))\n except PermissionError as e:\n curDir = os.path.abspath(os.curdir)\n print('Current directory: {0}'.format(curDir))\n print('Error: {0}'.format(e))\n \n return",
"def on_coding_standard_file_browse(self, *args):\n file = GPS.MDI.file_selector()\n if file.path != \"\":\n self.fileEntry.set_text(file.path)",
"def on_coding_standard_file_browse(self, *args):\n file = GPS.MDI.file_selector()\n if file.path != \"\":\n self.fileEntry.set_text(file.path)",
"def readConfigFileDlg( self ):\n fileName = QtGui.QFileDialog.getOpenFileName( self, \"Read application config file\", self.rsrc.lastFolder, \"Config files (*.cfg)\" )\n if ( fileName ):\n self.readConfigFile( fileName )\n path, fName = os.path.split( str( fileName ) )\n self.rsrc.lastFolder = path",
"def preferences(self):\n from dialogs import preferencesDialog\n prefs=preferencesDialog.Create(self,-1,'')\n prefs.ShowModal()"
] | [
"0.6109975",
"0.5899527",
"0.5769735",
"0.55686957",
"0.55619633",
"0.5373799",
"0.5356747",
"0.5335496",
"0.53288305",
"0.5298387",
"0.5294953",
"0.52298236",
"0.5206536",
"0.51610196",
"0.514091",
"0.50889593",
"0.5086334",
"0.5081264",
"0.5057602",
"0.50511295",
"0.5033079",
"0.5030382",
"0.5021893",
"0.5000733",
"0.49955884",
"0.49872825",
"0.4971711",
"0.4971711",
"0.49711868",
"0.49615443"
] | 0.61366576 | 0 |
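These apple.awt.* properties only take effect when the JVM's native java.awt.FileDialog peer reads them, so the usual shape is save / set / show dialog / restore. A minimal sketch of that pattern under a Jython-on-Java runtime like the one the record above targets; the helper name and the "false" fallback are assumptions.

import os
from java.lang import System
from java.awt import FileDialog, Frame

def choose_mac_folder(title="Select a folder"):
    # Remember the current value so it can be restored afterwards.
    previous = System.getProperty("apple.awt.fileDialogForDirectories")
    System.setProperty("apple.awt.fileDialogForDirectories", "true")
    try:
        fd = FileDialog(Frame(), title, FileDialog.LOAD)
        fd.setVisible(True)  # blocks until the user picks or cancels
        if fd.getFile() is None:
            return None
        return os.path.join(fd.getDirectory(), fd.getFile())
    finally:
        System.setProperty("apple.awt.fileDialogForDirectories",
                           previous if previous is not None else "false")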
This triggers MD to firePreferencesUpdated()... hopefully refreshing the Home Screen Views too | def fireMDPreferencesUpdated():
myPrint("DB", "In ", inspect.currentframe().f_code.co_name, "()" )
class FPSRunnable(Runnable):
def __init__(self): pass
def run(self):
myPrint("DB",".. Inside FPSRunnable() - calling firePreferencesUpdated()...")
myPrint("B","Triggering an update to the Summary/Home Page View")
MD_REF.getPreferences().firePreferencesUpdated()
if not SwingUtilities.isEventDispatchThread():
myPrint("DB",".. Not running within the EDT so calling via FPSRunnable()...")
SwingUtilities.invokeLater(FPSRunnable())
else:
myPrint("DB",".. Already running within the EDT so calling FPSRunnable() naked...")
FPSRunnable().run()
return | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def refresh_screen(self):",
"def preferencesChanged(self):\n # do nothing\n pass",
"def on_refresh(self):\n pass",
"def updateSettingsUI(self):\n\n pass",
"def applyPrefs (self):\r\n self.storyPanel.eachWidget(lambda w: w.applyPrefs())\r\n self.storyPanel.Refresh()",
"def on_pre_enter(self):\n Logger.info('Application: Changed to the Settings screen.')",
"def onPreferences():\n dialog = prefDialog()\n dialog.show()",
"def onSettings(self):\n pass",
"def preferencesChanged(self):\n self.__logViewer.preferencesChanged()",
"def refresh(self) -> None:\n self.screen.refresh()",
"def onUpdated(self):",
"def Refresh(self):\n pass",
"def refresh_view():\n pass",
"def _on_pref_decls_updated(self, change):\n self._refresh_pref_decls()",
"def _refresh(self):\n self._need_display_update = True\n self._update()",
"def _refresh_screen(self):\n self.myscreen.refresh()\n self.box1.refresh()\n self.box2.refresh()",
"def refresh_plugin(self):\n pass",
"def refresh(self):\n pass",
"def refresh(self):\n pass",
"def refresh(self, view_manager):\n pass",
"def preferencesChanged(self):\n # reload the APIs\n self.apisManager.reloadAPIs()\n \n # reload editor settings\n for editor in self.editors:\n zoom = editor.getZoom()\n editor.readSettings()\n editor.zoomTo(zoom)\n \n # reload the autosave timer setting\n self.autosaveInterval = Preferences.getEditor(\"AutosaveInterval\")\n if len(self.editors):\n if (\n self.autosaveTimer.isActive() and\n self.autosaveInterval == 0\n ):\n self.autosaveTimer.stop()\n elif (\n not self.autosaveTimer.isActive() and\n self.autosaveInterval > 0\n ):\n self.autosaveTimer.start(self.autosaveInterval * 60000)\n \n self.__enableSpellingActions()",
"def onPreferences():\n cpp.createWidgets()\n dialog = cpp.dialog()\n dialog.show()",
"def on_settings(self):\n\n # Pull the current app state from the relay Observer object\n status, interval, ntfc_status, ntfc_state = settings_state.get_state()\n\n # Pass it to the Observable object in order to render the Settings window\n settings_changed, update_interval, ntfc_changed, ntfc_selected = render_settings_window(\n status, interval, ntfc_status, ntfc_state, settings_state)\n\n # Register any state changes\n settings_state.update_state(settings_changed, update_interval, ntfc_changed, ntfc_selected)\n\n # If the interval has changed, reprogram scheduler to run at the new interval\n if settings_state.intrvl_change_trig:\n modify_scheduler(JOB_ID, settings_state.settings_interval)\n\n if settings_state.notification_change_trig:\n NewsIndicator.notifications = False if not settings_state.notification_state else True",
"def refresh(self) -> None:\n pass",
"def refresh(self) -> None:\n pass",
"def refresh(self) -> None:\n pass",
"def _refresh(self):\n # if we have all the values we need to hookup to the URL\n for key in self.DBMSettings.keys():\n if not key.startswith(LOCALCHAR):\n self.DBMSettings[key] = self._urldict()[key]",
"def update_ui(self):\n # main data\n self.lAcc.setText(self.settings.ACCOUNT)\n # self.lExcessLiquidity.setText(str(self.ibkrworker.app.excessLiquidity))\n # self.lSma.setText(str(self.ibkrworker.app.sMa))\n if hasattr(self.ibkrworker.app, 'smaWithSafety'):\n self.lSma.setText(str(round(self.ibkrworker.app.smaWithSafety, 1)))\n else:\n self.lSma.setText(str(round(self.ibkrworker.app.sMa, 1)))\n self.lMarketValue.setText(str(self.ibkrworker.app.netLiquidation))\n self.lblAvailTrades.setText(str(self.ibkrworker.app.tradesRemaining))\n self.lcdPNL.display(self.ibkrworker.app.dailyPnl)\n if self.ibkrworker.app.dailyPnl > 0:\n palette = self.lcdPNL.palette()\n palette.setColor(palette.WindowText, QtGui.QColor(51, 153, 51))\n self.lcdPNL.setPalette(palette)\n elif self.ibkrworker.app.dailyPnl < 0:\n palette = self.lcdPNL.palette()\n palette.setColor(palette.WindowText, QtGui.QColor(255, 0, 0))\n self.lcdPNL.setPalette(palette)\n\n total_positions_value = 0\n for p in self.ibkrworker.app.openPositions.values():\n if hasattr(p, 'Value'):\n total_positions_value += p[\"Value\"]\n self.lPositionsTotalValue.setText(str(round(total_positions_value, 1)))\n\n self.update_open_positions()\n self.update_live_candidates()\n self.update_open_orders()\n\n # everything disabled for safety - is now enabled\n self.chbxProcess.setEnabled(True)\n self.btnSettings.setEnabled(True)\n\n self.update_session_state()\n\n if not self.uiTimer.isActive():\n self.update_console(\"UI resumed.\")\n self.uiTimer.start(int(self.settings.INTERVALUI) * 1000) # reset the ui timer",
"def _breakpoints_changed(self):\n if not self.view:\n return\n\n if self._ignore_signals:\n return\n\n self.view.refresh()",
"def update_current_screen(self):\n\t\tself.current_screen.update()"
] | [
"0.70166314",
"0.67420226",
"0.67126137",
"0.6558181",
"0.6547368",
"0.6456297",
"0.62285334",
"0.61892396",
"0.6178164",
"0.6083877",
"0.59968984",
"0.59863025",
"0.5978254",
"0.5940049",
"0.5935096",
"0.5920283",
"0.59084743",
"0.58467805",
"0.58467805",
"0.58374125",
"0.58041275",
"0.5804079",
"0.5803662",
"0.5737976",
"0.5737976",
"0.5737976",
"0.5722338",
"0.56882226",
"0.56865776",
"0.5680478"
] | 0.7383542 | 0 |
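The shape of fireMDPreferencesUpdated() above, wrapping the call in a Runnable and handing it to SwingUtilities.invokeLater() only when off the event-dispatch thread, generalises to any callable. A minimal Jython/Swing sketch; run_on_edt is an illustrative name, not part of the record.

from javax.swing import SwingUtilities
from java.lang import Runnable

class _PyRunnable(Runnable):
    def __init__(self, fn, args):
        self.fn = fn
        self.args = args
    def run(self):
        self.fn(*self.args)

def run_on_edt(fn, *args):
    # Execute immediately if already on the EDT, otherwise queue it.
    if SwingUtilities.isEventDispatchThread():
        fn(*args)
    else:
        SwingUtilities.invokeLater(_PyRunnable(fn, args))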
Will detect and then run the codeblock on the EDT | def genericSwingEDTRunner(ifOffEDTThenRunNowAndWait, ifOnEDTThenRunNowAndWait, codeblock, *args):
isOnEDT = SwingUtilities.isEventDispatchThread()
# myPrint("DB", "** In .genericSwingEDTRunner(), ifOffEDTThenRunNowAndWait: '%s', ifOnEDTThenRunNowAndWait: '%s', codeblock: '%s', args: '%s'" %(ifOffEDTThenRunNowAndWait, ifOnEDTThenRunNowAndWait, codeblock, args))
myPrint("DB", "** In .genericSwingEDTRunner(), ifOffEDTThenRunNowAndWait: '%s', ifOnEDTThenRunNowAndWait: '%s', codeblock: <codeblock>, args: <args>" %(ifOffEDTThenRunNowAndWait, ifOnEDTThenRunNowAndWait))
myPrint("DB", "** In .genericSwingEDTRunner(), isOnEDT:", isOnEDT)
class GenericSwingEDTRunner(Runnable):
def __init__(self, _codeblock, arguments):
self.codeBlock = _codeblock
self.params = arguments
def run(self):
myPrint("DB", "** In .genericSwingEDTRunner():: GenericSwingEDTRunner().run()... about to execute codeblock.... isOnEDT:", SwingUtilities.isEventDispatchThread())
self.codeBlock(*self.params)
myPrint("DB", "** In .genericSwingEDTRunner():: GenericSwingEDTRunner().run()... finished executing codeblock....")
_gser = GenericSwingEDTRunner(codeblock, args)
if ((isOnEDT and not ifOnEDTThenRunNowAndWait) or (not isOnEDT and not ifOffEDTThenRunNowAndWait)):
myPrint("DB", "... calling codeblock via .invokeLater()...")
SwingUtilities.invokeLater(_gser)
elif not isOnEDT:
myPrint("DB", "... calling codeblock via .invokeAndWait()...")
SwingUtilities.invokeAndWait(_gser)
else:
myPrint("DB", "... calling codeblock.run() naked...")
_gser.run()
myPrint("DB", "... finished calling the codeblock via method reported above...") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def run(self):\n self.window.mainloop()",
"def takeControl(self):\n mainloop()",
"def takeControl(self):\n mainloop()",
"def exec(self):\n if self._root.master is None:\n self._root.mainloop()",
"def run(self):\n self.ident = threading.current_thread().ident\n self.ready.set()\n self.exec_()",
"def run():\n gui = GUI()\n gui.mainloop()",
"def run(self):\n\t\tgtk.gdk.threads_init()\t\t\t# (!) important for multi-threading to work with GTK+\n\t\tself.__update_timer = gobject.timeout_add(250, self.__update, self)\n\t\tself.statusbar1.push(0, \"Ready (for about dialog; right-click to lower right corner).\")\n\t\tgtk.main()",
"def run(self):\n\n while not self.done:\n\n self.event_loop()\n\n self.update()",
"def mainloop(self):\n self.root.mainloop()",
"def mainloop(self):\n self.root.mainloop()",
"def exec(self):\n self._root.after(100, self.change_state, States.INITIAL) # enter the state once gui is setup\n super().exec()",
"def loop_run(self):\n super(EventLoop, self).loop_run()\n self.inq = self.cothread.EventQueue()",
"def mainloop(self):\n self.master.mainloop()",
"def execute_block_now(event):\n b = event.cli.current_buffer\n b.validate_and_handle()",
"def main(self):\n self.root.mainloop()",
"def run(self):\n if self.okay:\n ExtLoopWin32.run()",
"def mainloop(self):\n\t\tself.root.after(100, self.tkloop)\n\t\tself.root.mainloop()",
"def run_main_loop():\n mainloop = GObject.MainLoop()",
"def run(self):\n self.cmdloop()",
"def main(self):\n self.validate()\n self.root.mainloop()",
"def dispatch_loop(self):\n pass",
"def mainloop(self):\r\n self.bindHotkeys()\r\n self.root.mainloop()",
"def run(self):\n GLib.MainLoop().run()",
"def run(self):\n self.run()",
"def block(self):\n pass",
"def exec(self) -> bool:\n return bool(self._widget._mgui_exec())",
"def run(self):\n self.monitorTextBox.setPlainText(\"\")\n self.applyChanges()\n self.toolBox.setCurrentIndex(4)\n if self.dat.surrogateProblem == None:\n return\n tool = self.toolSelectBox.currentText()\n pg = self.dat.surrogateMethods.plugins[tool].surrogateMethod(self.dat)\n pg.loadDict(self.dat.surrogateProblem[tool])\n pg.start()\n self.pg = pg\n self.a = True\n self.timer.start(self.updateDelay)\n self.timeRunning = time.time()\n self.runButton.setEnabled(False)\n self.stopButton.setEnabled(True)\n self.setStatusBar.emit(\"Surrogate Generation Running\")",
"def _run(self):\n while(self._loop):\n pass",
"def _run_delayed_gui_load_code(self):\n #Stop the timer.\n self._delayed_gui_timer.stop()\n print(f'_run_delayed_gui_load_code() called!')\n # Try to select the first combo item after they've loaded\n self.ui.contextSelectorWidget._trySelectFirstComboItem()",
"def block(self):\n # I WILL RUN FOR EVER \n # BUT WHY ARE YOU RUNNING ?\n self.__loop.run_forever()"
] | [
"0.6536498",
"0.64465624",
"0.64465624",
"0.6426398",
"0.63144535",
"0.61656505",
"0.61306715",
"0.60988885",
"0.6066413",
"0.6066413",
"0.6001287",
"0.59971917",
"0.59842736",
"0.5967664",
"0.59582716",
"0.5954346",
"0.5944975",
"0.59409404",
"0.5900608",
"0.58864576",
"0.58551013",
"0.5854173",
"0.5829394",
"0.58157957",
"0.5808614",
"0.57962173",
"0.5795168",
"0.57730794",
"0.5766953",
"0.57541233"
] | 0.6598361 | 0 |
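One thing genericSwingEDTRunner() deliberately does not do is hand back the codeblock's return value. A sketch of a synchronous companion that does, built on invokeAndWait; this is an assumed helper in the same Jython/Swing setting, not part of the record above.

from javax.swing import SwingUtilities
from java.lang import Runnable

class _ResultRunnable(Runnable):
    def __init__(self, fn, args):
        self.fn = fn
        self.args = args
        self.result = None
    def run(self):
        self.result = self.fn(*self.args)

def call_on_edt_and_wait(fn, *args):
    # invokeAndWait would throw if called from the EDT itself, so guard for that.
    if SwingUtilities.isEventDispatchThread():
        return fn(*args)
    task = _ResultRunnable(fn, args)
    SwingUtilities.invokeAndWait(task)
    return task.result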
Implement your canvas drawing logic here; returning False will stop the rendering, returning True will continue it | def draw(self, canvas) -> bool:
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def draw (self, screen):\n drew = bool(self.draw_fn(self, screen, self.dirty))\n self.dirty = False\n return drew",
"def on_draw(self, widget, cr):\n #print \"starting to draw\"\n if self.double_buffer is not None:\n self.draw_tiles()\n cr.set_source_surface(self.double_buffer, 0.0, 0.0)\n cr.paint()\n else:\n print('Invalid double buffer')\n #print \"done drawing\"\n return False",
"def draw_animation(self, canvas, animation_tick) -> bool:\n return False",
"def on_draw_event(self, widget, ctx):\n # the _need_redraw flag doesnt work. it sometimes prevents\n # the rendering and leaving the canvas blank\n #if self._need_redraw:\n self._renderer.set_context(ctx)\n allocation = self.get_allocation()\n x, y, w, h = allocation.x, allocation.y, allocation.width, allocation.height\n self._render_figure(w, h)\n #self._need_redraw = False\n\n return False # finish event propagation?",
"def draw():",
"def paintGL(self):\n print \"Entereing paintGL\"\n if self.bDrawing == True:\n print \"Drawing was true so quit\"\n return\n \n \n self.bDrawing = True\n threadDrawGL = threading.Thread(target = self.drawGLScene)\n threadDrawGL.start()\n #self.drawGLScene()",
"def _prepare_draw(self, view=None):\n return True",
"def draw(self):\n pass",
"def draw(self):\n pass",
"def draw(self):\n pass",
"def draw(self):\n pass",
"def draw(self):\n\n for row in self._board:\n for slot in row:\n if slot == 0:\n return False\n print \"It's a draw!\"\n return True",
"def draw(self):\n\t\tpass",
"def can_draw(self,point):\n if point <= 0:\n return False\n else:\n return True",
"def draw(self):",
"def draw (self):\n screen = self.screen\n dirty = False\n for display in self.displays:\n dirty |= display.draw(screen)\n return dirty",
"def _draw(self, canvas, options):\n pass # must override in subclass",
"def draw(self):\n raise NotImplementedError",
"def draw(self):\n raise NotImplementedError",
"def draw(self):\n raise NotImplementedError",
"def draw(self):\n return self._myCanvas.draw()",
"def game_draw(self):\n pass",
"def conditionsAreMetForDrawing(self):\n\t\tcurrentController = self.controller.view().window().windowController()\n\t\tif currentController:\n\t\t\ttool = currentController.toolDrawDelegate()\n\t\t\ttextToolIsActive = tool.isKindOfClass_( NSClassFromString(\"GlyphsToolText\") )\n\t\t\thandToolIsActive = tool.isKindOfClass_( NSClassFromString(\"GlyphsToolHand\") )\n\t\t\tif not textToolIsActive and not handToolIsActive: \n\t\t\t\treturn True\n\t\treturn False",
"def draw(self):\r\n if not self.stopped:\r\n super().draw()\r\n self.next_frame()",
"def draw(self, screen):",
"def save_drawing_if_necessary(self):\n\n app_doc_data = AppDocData.instance()\n if app_doc_data.activeDrawing and app_doc_data.activeDrawing.modified:\n #if QMessageBox.Yes == QMessageBox.question(self, self.tr(\"Question\"),\n # self.tr(\"Do you want to save drawing?\"),\n # QMessageBox.Yes | QMessageBox.No):\n # self.actionSaveCliked()\n # return True\n if QMessageBox.Ignore == QMessageBox.question(self, self.tr('Continue?'),\n self.tr('Changes may not have been saved.'),\n QMessageBox.Ignore | QMessageBox.Cancel):\n return False\n return True",
"def isOnCanvas(self, x, y):\n return 0 <= x < self.width and 0 <= y < self.height",
"def draw (self):\n screen = self.screen\n dirty = False\n for z, displays in self.layers.iteritems():\n for display in displays:\n drew = display.draw(screen)\n # if made changes to the surface\n if drew:\n # set any displays that overlap this one dirty\n for d in display.overlapped:\n d.dirty = True\n dirty |= drew\n return dirty",
"def on_draw(self):\n # draw everything",
"def draw(self, surface):\n checked_color = (0, 196, 0) if self.checked else pg.Color(\"white\")\n surface.fill(pg.Color(\"black\"), self.rect)\n surface.fill(self.color, self.rect.inflate(-2,-2))\n surface.fill(pg.Color(\"white\"), self.rect.inflate(-6,-6))\n surface.fill((205,205,205), self.rect.inflate(-8,-8))\n surface.fill(checked_color, self.select_rect)"
] | [
"0.7068976",
"0.70053196",
"0.6794968",
"0.67545444",
"0.66375184",
"0.66362685",
"0.65746",
"0.6460818",
"0.6460818",
"0.6460818",
"0.6460818",
"0.64464664",
"0.64366764",
"0.6423051",
"0.63904214",
"0.6366687",
"0.6364581",
"0.63468754",
"0.63468754",
"0.63468754",
"0.62978816",
"0.6290934",
"0.6273019",
"0.6251459",
"0.62476724",
"0.62395257",
"0.62385046",
"0.6224893",
"0.61349225",
"0.61160195"
] | 0.8466848 | 0 |
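The draw(canvas) -> bool hook above is a keep-alive contract: the host keeps scheduling the widget for the next frame while it returns True. A toolkit-free sketch of the driving side, with all names illustrative.

class Widget:
    def draw(self, canvas):
        # Override in a subclass; return True to stay in the render loop.
        return False

def render_frame(widgets, canvas):
    # Draw each widget once, keeping only those that want another frame.
    return [w for w in widgets if w.draw(canvas)]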
Implement your canvas animation drawing logic here; returning False will stop the rendering, returning True will continue it | def draw_animation(self, canvas, animation_tick) -> bool:
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def draw(self, canvas) -> bool:\n return False",
"def draw(self):\r\n if not self.stopped:\r\n super().draw()\r\n self.next_frame()",
"def on_draw(self, widget, cr):\n #print \"starting to draw\"\n if self.double_buffer is not None:\n self.draw_tiles()\n cr.set_source_surface(self.double_buffer, 0.0, 0.0)\n cr.paint()\n else:\n print('Invalid double buffer')\n #print \"done drawing\"\n return False",
"def paintGL(self):\n print \"Entereing paintGL\"\n if self.bDrawing == True:\n print \"Drawing was true so quit\"\n return\n \n \n self.bDrawing = True\n threadDrawGL = threading.Thread(target = self.drawGLScene)\n threadDrawGL.start()\n #self.drawGLScene()",
"def draw (self, screen):\n drew = bool(self.draw_fn(self, screen, self.dirty))\n self.dirty = False\n return drew",
"def draw():",
"def draw(self):\n if self.node:\n if self.async:\n if self.cancel_draw:\n self.after_cancel(self.cancel_draw)\n self.cancel_draw = self.after(3, self._draw)\n else: self._draw()",
"def _run(self):\n self._is_running = False\n self.start()\n self._pos += 1\n self.draw(self.img)",
"def on_draw_event(self, widget, ctx):\n # the _need_redraw flag doesnt work. it sometimes prevents\n # the rendering and leaving the canvas blank\n #if self._need_redraw:\n self._renderer.set_context(ctx)\n allocation = self.get_allocation()\n x, y, w, h = allocation.x, allocation.y, allocation.width, allocation.height\n self._render_figure(w, h)\n #self._need_redraw = False\n\n return False # finish event propagation?",
"def update(self):\n check_key_press = lambda key: self._window.was_key_pressed(key)\n frame = self._current_image.copy()\n\n for key, event in self._key_events.items():\n if check_key_press(key):\n event()\n\n for annotation in self._annotations:\n annotation.draw(frame)\n\n if self._annotation_in_progress is not None:\n self._annotation_in_progress.draw(frame)\n\n self.show_controls(frame)\n\n self._window.draw(frame)\n return not self._window.should_quit",
"def game_draw(self):\n pass",
"def draw(self):",
"def draw(self):\n pass",
"def draw(self):\n pass",
"def draw(self):\n pass",
"def draw(self):\n pass",
"def draw(self):\n\t\tpass",
"def _prepare_draw(self, view=None):\n return True",
"def EndDraw(self):\r\n\r\n pass",
"def repaint(self):\n self.screen.blit(self.source, (0, 0))\n self.lcd.draw(self.lcddraw)\n if self.drawmode & self.DRAW_CIRCLE:\n self.plot_circle()\n pygame.display.flip()",
"def draw(canvas):\n\n max_row, max_col = canvas.getmaxyx()\n canvas.nodelay(True)\n\n COROUTINES.append(fire(canvas, max_row // 2, max_col // 2))\n COROUTINES.append(animate_spaceship(canvas, max_row // 2, max_col // 2 - 2, max_row, max_col))\n COROUTINES.append(fill_orbit_with_garbage(canvas))\n for i in range(STARS_AMOUNT):\n column = random.randint(1, max_col - 1)\n row = random.randint(1, max_row - 1)\n symbol = random.choice('+*.:')\n COROUTINES.append(blink(canvas, row, column, random.randint(0, 10), symbol))\n\n while COROUTINES:\n curses.curs_set(False)\n canvas.border()\n for coroutine in COROUTINES:\n try:\n coroutine.send(None)\n except StopIteration:\n COROUTINES.remove(coroutine)\n if len(COROUTINES) == 0:\n break\n canvas.refresh()\n time.sleep(TIC_TIMEOUT)",
"def run_animation(self):\n self.animation = True\n self.fig = plt.figure()\n anim_running = True\n\n def onClick(event):\n nonlocal anim_running\n if anim_running:\n anim.event_source.stop()\n anim_running = False\n else:\n anim.event_source.start()\n anim_running = True\n\n self.fig.canvas.mpl_connect('button_press_event', onClick)\n anim = FuncAnimation(self.fig, self.update, fargs=None, interval=5)\n plt.show()",
"def _logic(self):\n yes = self.yes_button.was_pressed or self.yes_button.pressed\n no = self.no_button.was_pressed or self.no_button.pressed\n\n # render\n if self.render_timer.finished:\n # start = time()\n self.window_renderer.update(self.buffer_image.tobytes())\n # debug(\"render time: %s\", time() - start)\n self.render_timer.start()\n\n if self.state == STATE_DEFAULT:\n if (no and not self.disable_quit) or (no and yes):\n self._enter_state(STATE_EXIT_PROMPT)\n elif yes:\n self._enter_state(STATE_PREPARE)\n\n elif self.state == STATE_EXIT_PROMPT:\n if yes:\n return False\n elif no:\n self._enter_state(STATE_DEFAULT)\n\n elif self.state == STATE_PREPARE:\n if no:\n self._enter_state(STATE_DEFAULT)\n elif self.countdown_timer.finished:\n self.pictures_taken = list()\n self.camera_controller.clear_workdir()\n self._enter_state(STATE_PICTURE_COUNTDOWN)\n\n elif self.state == STATE_PICTURE_COUNTDOWN:\n if no:\n self._enter_state(STATE_DEFAULT)\n elif self.countdown_timer.finished:\n self.pictures_taken.append(self.camera_controller.capture_photo())\n self._enter_state(STATE_PICTURE_TAKEN)\n else:\n self.window.find_by_name(NAME_GET_STARTED).text = \"\" \\\n + str(len(self.pictures_taken) + 1) + \" of \" + str(self.picture_count) \\\n + \"\\n\" + str(int(self.countdown_timer.remaining) + 1)\n\n elif self.state == STATE_PICTURE_TAKEN:\n if no:\n self._enter_state(STATE_DEFAULT)\n elif self.countdown_timer.finished:\n if len(self.pictures_taken) >= self.picture_count:\n t = threading.Thread(target=self._upload_to_twitter)\n t.start()\n self._enter_state(STATE_PRINT)\n else:\n self._enter_state(STATE_PICTURE_COUNTDOWN)\n\n elif self.state == STATE_PRINT:\n if no:\n self._enter_state(STATE_DEFAULT)\n elif yes:\n self._enter_state(STATE_PRINTING)\n\n elif self.state == STATE_PRINTING:\n if no:\n self._enter_state(STATE_DEFAULT)\n else:\n strip_file = self.create_strip()\n args = self.print_command.replace('{filename}', strip_file).split()\n subprocess.Popen(args)\n self._enter_state(STATE_COMPLETED)\n\n elif self.state == STATE_COMPLETED:\n if yes or no or self.countdown_timer.finished:\n self._enter_state(STATE_DEFAULT)\n\n else:\n raise RuntimeError(\"The app is in an unknown state: \" + str(self.state))\n\n return True",
"def _update_anim(self):\n if self._skip_frames > 1:\n # Do not render while _skip_frames is > 1\n self._skip_frames -= 1\n else:\n # Render frame\n self._visualization.taskMgr.step()\n # Calculate number of frames that need to be skipped\n self._skip_frames = int(1 / self._fps / self._dt)",
"def drawStar(duration):\n # START CODE HERE #\n\n\n pass\n # END CODE HERE # (remove the pass statement)",
"def pre_draw(self):",
"def draw(self):\n self.figure.canvas.draw_idle()",
"def draw(self):\n # IMPLEMENT ME\n \"\"\"\n GRectangle(x=GAME_WIDTH/2,y=GAME_HEIGHT/2,\n width=GAME_WIDTH,height=GAME_HEIGHT,\n fillcolor=introcs.RGB(0,0,0)).draw(self.view)\n if self.getState() == STATE_INACTIVE:\n self.getText().draw(self.view)\n if self.getState() == STATE_PAUSED:\n self.getText().draw(self.view)\n if not self.getWave() is None:\n self.getWave().draw(self.view)\n if self.getState() == STATE_COMPLETE:\n self.getText().draw(self.view)\n if self.getState() == STATE_PAUSED or self.getState() == STATE_ACTIVE or self.getState() == STATE_COMPLETE:\n self.getText().draw(self.view)\n\n GRectangle(x=GAME_WIDTH/2,y=GAME_HEIGHT/2,\n width=GAME_WIDTH,height=GAME_HEIGHT,\n fillcolor=introcs.RGB(0,0,0)).draw(self.view)\"\"\"\n if not self.getText() is None:\n self.getText().draw(self.view)\n if not self.getWave() is None:\n self.getWave().draw(self.view)",
"def _draw(self, canvas, options):\n pass # must override in subclass",
"def draw(self):\n if self.is_clicked:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 0)\n else:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 1)"
] | [
"0.78179634",
"0.68488324",
"0.6405945",
"0.6315539",
"0.6288791",
"0.6209565",
"0.6140665",
"0.6124815",
"0.6104606",
"0.60049677",
"0.5990784",
"0.59893525",
"0.5976392",
"0.5976392",
"0.5976392",
"0.5976392",
"0.59686214",
"0.5960046",
"0.5932858",
"0.5920046",
"0.59116364",
"0.58809614",
"0.58726156",
"0.5864331",
"0.58313143",
"0.5804074",
"0.57993144",
"0.5795677",
"0.5792726",
"0.57680404"
] | 0.8034474 | 0 |
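draw_animation adds a monotonically increasing tick so a widget can sequence its frames, and the host stops calling it once it returns False. A minimal pure-Python sketch of both sides, assuming nothing about the real canvas object.

class Spinner:
    FRAMES = "|/-\\"

    def draw_animation(self, canvas, animation_tick):
        # Cycle through the frames, then ask the host to stop after 12 ticks.
        print(self.FRAMES[animation_tick % len(self.FRAMES)])
        return animation_tick < 12

def animate(widget, canvas=None):
    tick = 0
    while widget.draw_animation(canvas, tick):
        tick += 1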
Respond to theme load-ins here | def load_theme_values(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def on_load_theme (self):\n\n\t\tif self.has_started:\n\t\t\tself.init_buffers()\n\t\t\tself.redraw_background()\n\t\t\tself.redraw_foreground()",
"def on_load(self):\n pass",
"def on_load(self):\n pass",
"def onStartup(event):\n\n plugins = getPlugins()\n\n for themeDirectory in iterDirectoriesOfType(THEME_RESOURCE_NAME):\n if themeDirectory.directory in reload_paths: # only for sauna.reload!\n pluginSettings = getPluginSettings(themeDirectory, plugins)\n\n for name, plugin in plugins:\n plugin.onDiscovery(themeDirectory.__name__,\n pluginSettings[name],\n pluginSettings)",
"def on_load(self):",
"def dummy_loader(cls, context):\n log.warning(\"theme is not set -- add a theme value to your site settings\")\n theme_json = DEFAULT\n hasher = hashlib.md5()\n hasher.update(theme_json.encode(\"utf-8\"))\n theme_hash = hasher.hexdigest()\n\n theme_data = json.loads(theme_json)\n\n theme_data[\"_moya\"] = {\"path\": None, \"hash\": theme_hash}\n return theme_data",
"def _post_load(self):\n pass",
"def on_load(self):\n self.__init__()",
"def handle_reload_toolbox(self):",
"async def _theme_heist(self, ctx, theme):\r\n theme = theme.title()\r\n guild = ctx.guild\r\n\r\n if not os.path.exists(str(bundled_data_path(self)) + \"/{}.txt\".format(theme)):\r\n themes = [os.path.join(x).replace('.txt', '')\r\n for x in os.listdir(str(bundled_data_path(self))) if x.endswith(\".txt\")]\r\n msg = (\"I could not find a theme with that name. Available Themes:\"\r\n \"```\\n{}```\".format('\\n'.join(themes)))\r\n else:\r\n msg = await self.thief.theme_loader(guild, theme)\r\n\r\n await ctx.send(msg)",
"def use_my_theme():\n # register and enable the theme\n alt.themes.register(\"my_theme\", my_theme)\n alt.themes.enable(\"my_theme\")",
"def use_my_theme():\n # register and enable the theme\n alt.themes.register(\"my_theme\", my_theme)\n alt.themes.enable(\"my_theme\")",
"def request_plugins(self):",
"def on_startup(self) -> None:\n ...",
"def postLoad(self):\n pass",
"def refresh(self):\n self._themes = {}\n for theme in starchain(ldr(self.app) for ldr in self.loaders):\n if self.valid_app_id(theme.application):\n self.themes[theme.identifier] = theme\n self.register_theme_assets()",
"def __init_on_load__(self):",
"def on_init(self):\n self.write_log(\"策略初始化\")\n self.load_bar(1)",
"def post_setup(self, context):\n pass",
"def load(self):\n\n super().load()\n self.check_dcss()\n self.check_discord()",
"def loader(cls, fs):\n\n def load(context=None):\n if context is None:\n context = pilot.context\n name = context.get(\".sys.site.theme\", \"default\")\n\n path = \"{}.json\".format(name)\n try:\n theme = cls.read(fs, path, context=context)\n except Exception as e:\n log.warning(\"unable to read theme file '%s' (%s)\", path, text_type(e))\n\n if name != \"default\":\n return load(\"default\")\n\n log.error(\"unable to load 'default' theme\")\n theme = None\n\n return theme\n\n return load",
"def init_ui(self):\n self.parent.title(\"Roku Player Controller\")\n self.style.theme_use(\"default\")",
"def packaged_themes_loader(app):\n themes_path = os.path.join(app.root_path, 'themes')\n if os.path.exists(themes_path):\n return load_themes_from(themes_path)\n else:\n return ()",
"def on_init(self):\n self.write_log(\"策略初始化\")\n\n self.load_bar(10)",
"def on_init(self):\n self.write_log(\"策略初始化\")\n self.load_bar(10)",
"def on_init(self):\n self.write_log(\"策略初始化\")\n self.load_bar(10)",
"def before_request():\r\n\r\n\tinit_classes()",
"def plugin_loaded():\n events.broadcast(\"plugin_loaded\")",
"def updateTheme(self):\n self.myUpdate(stateDict=None)",
"def post_start(self):"
] | [
"0.68556285",
"0.66520184",
"0.66520184",
"0.6610389",
"0.64057696",
"0.62862283",
"0.61794144",
"0.6151126",
"0.6137831",
"0.59524107",
"0.59130126",
"0.59130126",
"0.5907942",
"0.58921796",
"0.58286834",
"0.58206403",
"0.57867295",
"0.5769557",
"0.576859",
"0.5764957",
"0.5720274",
"0.5701185",
"0.5699979",
"0.56952167",
"0.5665115",
"0.5665115",
"0.5621124",
"0.5608668",
"0.55465263",
"0.5542513"
] | 0.6718465 | 1 |
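A note on the theme-hashing pattern several of the negatives above share (see `dummy_loader`): the raw JSON payload is md5-hashed before being attached to the parsed theme. A minimal runnable sketch, with `DEFAULT` as a hypothetical stand-in for the real default-theme JSON:

```python
import hashlib
import json

DEFAULT = '{"colors": {"background": "#ffffff"}}'  # hypothetical default theme JSON

def load_default_theme():
    # Hash the raw JSON so callers can detect when the payload changes.
    hasher = hashlib.md5()
    hasher.update(DEFAULT.encode("utf-8"))
    theme_data = json.loads(DEFAULT)
    theme_data["_moya"] = {"path": None, "hash": hasher.hexdigest()}
    return theme_data

print(load_default_theme()["_moya"]["hash"])
```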
Starts a setup mode used for moving, resizing and other changes that the user might set up | def start_setup(self, setup_type):
# Persist the user preferences when we end our setup
if (self.setup_type != "" and not setup_type):
self.setup_type = setup_type
rect = self.canvas.get_rect()
self.x = int(rect.x)
self.y = int(rect.y)
self.width = int(rect.width)
self.height = int(rect.height)
self.preferences.persist_preferences({
self.id + '_x': self.x,
self.id + '_y': self.y,
self.id + '_width': self.width,
self.id + '_height': self.height
})
# Start the setup state
elif self.setup_type != setup_type:
self.setup_type = setup_type
if (self.setup_type == "position"):
x, y = ctrl.mouse_pos()
self.canvas.move(x, y) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setup():\n setFormat()\n setFilename()\n setScreenMode()",
"def setup_mode():\n status_label.color = WHITE\n status_label.text = \"-SET-\"\n\n ave_label.color = BLACK # Turn off average label and value display\n ave_value.color = BLACK\n\n max_value.text = str(MAX_RANGE_F) # Display maximum range value\n min_value.text = str(MIN_RANGE_F) # Display minimum range value\n\n time.sleep(0.8) # Show SET status text before setting parameters\n status_label.text = \"\" # Clear status text\n\n param_index = 0 # Reset index of parameter to set\n\n setup_state = \"SETUP\" # Set initial state\n while setup_state == \"SETUP\":\n # Select parameter to set\n setup_state = \"SELECT_PARAM\" # Parameter selection state\n while setup_state == \"SELECT_PARAM\":\n param_index = max(0, min(2, param_index))\n status_label.text = SETUP_COLORS[param_index][0]\n image_group[param_index + 226].color = BLACK\n status_label.color = BLACK\n time.sleep(0.25)\n image_group[param_index + 226].color = SETUP_COLORS[param_index][1]\n status_label.color = WHITE\n time.sleep(0.25)\n\n param_index -= get_joystick()\n\n _buttons = panel.events.get()\n if _buttons and _buttons.pressed:\n if _buttons.key_number == BUTTON_UP: # HOLD button pressed\n param_index = param_index - 1\n if _buttons.key_number == BUTTON_DOWN: # SET button pressed\n param_index = param_index + 1\n if _buttons.key_number == BUTTON_HOLD: # HOLD button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"ADJUST_VALUE\" # Next state\n if _buttons.key_number == BUTTON_SET: # SET button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"EXIT\" # Next state\n\n # Adjust parameter value\n param_value = int(image_group[param_index + 230].text)\n\n while setup_state == \"ADJUST_VALUE\":\n param_value = max(32, min(157, param_value))\n image_group[param_index + 230].text = str(param_value)\n image_group[param_index + 230].color = BLACK\n status_label.color = BLACK\n time.sleep(0.05)\n image_group[param_index + 230].color = SETUP_COLORS[param_index][1]\n status_label.color = WHITE\n time.sleep(0.2)\n\n param_value += get_joystick()\n\n _buttons = panel.events.get()\n if _buttons and _buttons.pressed:\n if _buttons.key_number == BUTTON_UP: # HOLD button pressed\n param_value = param_value + 1\n if _buttons.key_number == BUTTON_DOWN: # SET button pressed\n param_value = param_value - 1\n if _buttons.key_number == BUTTON_HOLD: # HOLD button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"SETUP\" # Next state\n if _buttons.key_number == BUTTON_SET: # SET button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"EXIT\" # Next state\n\n # Exit setup process\n status_label.text = \"RESUME\"\n time.sleep(0.5)\n status_label.text = \"\"\n\n # Display average label and value\n ave_label.color = YELLOW\n ave_value.color = YELLOW\n return int(alarm_value.text), int(max_value.text), int(min_value.text)",
"def startMode(self):\n raise NotImplementedError('startMode() should be implemented')",
"def setUp(self):\r\n self.caption = \"mirra extending classes\" # window name\r\n self.size = 640, 480 #window size\r\n self.pos = 100,100 # window top left location\r\n self.fullScreen = 0 # if fullScreen is on it will overwrite your pos and size to match the display's resolution\r\n self.frameRate = 15 # set refresh framerate\r",
"def setup(self):\n # if not system.restore_snapshot():\n # self.log.debug(\"No snapshot to restore, if this is not expected please contact automation team\")\n crindsim.set_mode(\"manual\")\n pos.connect()\n pos.sign_on()",
"def aimMode_Setup(self, state):\n\n pass",
"def renderSetup(style, *args):\n # set up confirm dialog to pop up window to do these things (replace the orig buttons and partial command)\n dial = cmds.confirmDialog(t=\"Render Setup\", message=\"Choose how you'd like to initially setup the current scene:\", button=[\"Generic\", \"Arnold\", \"Maxwell\", \"VRay\", \"Cancel\"])\n\n if dial != \"Cancel\":\n # sets the common setting regarless\n lgt.setCommon()\n\n if dial == \"Arnold\":\n lgt.setArnold()\n if dial == \"Vay\":\n lgt.setVray()\n if dial == \"Maxwell\":\n lgt.setMaxwell()",
"def setup():\r\n #this happens just once\r\n size(width, height) #instead of create_canvas\r",
"def on_setup_btn(self):\n if self.state == self.INIT:\n self.send_rtsp_request(self.SETUP)",
"def setup(self):\n # Create your sprites and sprite lists here\n self.game: Game = Game(SCREEN_WIDTH, SCREEN_HEIGHT, TILE_SIZE, 1, grid_layers = 4)\n self.game.game_message = \"Lead the Rabbit home\"\n\n # show the menu so that we see the instructions\n self.game.menu.button_list[0].text = \"Start\"\n self.game.menu.is_visible = True",
"def change_mode(self):\n master.destroy()\n os.system(\"add_mode_run.py\")",
"def setup_game(self):",
"def setUp(self):\n #if UI object not found. the watcher method will be invoked\n d.watcher('AUTO_FC_WHEN_ANR').when(text='ANR').when(text='强行关闭') .press('enter')\n d.wakeup() #wakeup device ",
"def setup_pymol():\n pymol.finish_launching() # Prevent threading errors\n # Configure global settings\n cmd.set('scene_buttons', 1)\n cmd.set('matrix_mode', 1)\n cmd.set('movie_panel', 1)\n # Configure quality settings\n cmd.mset(\"1 x500\")\n cmd.set('ray_trace_frames', 1)\n cmd.viewport(800, 800)",
"def setup(self): \n # Navigate to POS screen\n pos.connect()",
"def screen_setup(screen_size):\n window = turtle.Screen()\n window.bgcolor(\"black\")\n window.title(\"Maze Game\")\n window.setup(screen_size, screen_size)",
"def _display_setup(self):\r\n display_file = \"{}/display.json\".format(self.settings_dir)\r\n with open(display_file) as json_file:\r\n win_settings = json.load(json_file)\r\n self.win = visual.Window(**win_settings)\r\n framerate = self.win.fps()\r\n self.frame_duration = 1.0/framerate\r\n self.mouse = event.Mouse(visible=False, win=self.win)",
"def do_activate(self, *args, **kwargs):\n self.register_signals()\n self.perform_setup()\n assert self.main_window\n self.main_window.show()\n self.hold()",
"def start_new_game(self, mode): \n self.display.clear() \n #self.ui.hide()\n if self.selected_speed == \"speed Slow\":\n self.game_manager.set_players_speed(1.9)\n elif self.selected_speed == \"speed Medium\":\n self.game_manager.set_players_speed(3)\n elif self.selected_speed == \"speed Fast\":\n self.game_manager.set_players_speed(5)\n self.game_manager = GameManager(self.display, self.ui, mode, GameState.Running, self.game_manager.player1, self.game_manager.player2)",
"def start(self):\r\n self.setDriver('ST', 1)",
"def ready(self):\r\n\t\t# Remove attract mode from mode queue - Necessary?\r\n\t\tself.game.modes.remove(self)\r\n\t\t# Initialize game\t\r\n\t\tself.game.start_game()\r\n\t\t# Add the first player\r\n\t\tself.game.add_player()\r\n #self.game.add_player()\r\n\t\t# Start the ball. This includes ejecting a ball from the trough.\r\n\t\tself.game.start_ball()",
"def onClick(self):\n self.app.setActiveMode(\"start\")",
"def start(self):\n self.active = True",
"def start(self):\n # asserts preconditions are met\n #assert self.validGameSettings()\n\n #draws initial welcome screen\n #self._text = GLabel(text=\"Press 'S' to Play\")\n #self._text.draw(self.view)\n\n # initializing instance variables\n self.setState(STATE_INACTIVE)\n self.setWave(None)\n self.setText(None)\n self.lastkeys = 0 #ADD MORE ATTRIBUTES\n\n # draws iniital welcome screen\n self.welcomeScreen()",
"def setup_callback():\n self.setup_window.deiconify()",
"def on_pre_enter(self):\n self.setup()\n self.start()",
"def enable_setup(self):\n self.high_ver_entry.config(state=\"normal\")\n self.low_ver_entry.config(state=\"normal\")\n self.left_hor_entry.config(state=\"normal\")\n self.right_hor_entry.config(state=\"normal\")",
"def enable_start(self, *args):\n but_start.configure(state=GL.NORMAL)",
"def open(self):\n windowFlags = self.getWindowFlags(self.settings)\n self.surface = pygame.display.set_mode(self._resolution, windowFlags)\n self._printVideoInfo(pygame.display.Info())\n logger.info(\"Initialized display with driver: \" + pygame.display.get_driver())\n\n self.surface.fill(self._skin.guiColor(\"Background\"))\n self._initializePanels(self._resolution, self._skin)\n pygame.display.flip()\n\n self._statusLoop.statusProvider = self.getStatusProvider(self.settings)",
"def setupNewGame(self):\r\n self.level = 1\r\n self.num_cows = 2\r\n self.num_farmers = 1\r\n self.levelHeading = Text(self.gameDisplay, 120, 425, 175, self.light_orange, \"Farm 1\")\r\n self.shield_indicator.image = self.greenShield\r\n updatedHeading = self.levelHeading\r\n self.startUX[0] = updatedHeading"
] | [
"0.6800144",
"0.6734263",
"0.66898155",
"0.66412646",
"0.6498546",
"0.633072",
"0.62841946",
"0.6278032",
"0.6212285",
"0.6206775",
"0.62048006",
"0.6197723",
"0.61908454",
"0.6190276",
"0.6158729",
"0.6156555",
"0.61511356",
"0.6121974",
"0.6118286",
"0.6098591",
"0.6072834",
"0.60583925",
"0.60570484",
"0.6044386",
"0.60377973",
"0.60302305",
"0.5998851",
"0.59982586",
"0.5993802",
"0.5980473"
] | 0.7203417 | 0 |
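The `start_setup` document above depends on Talon-specific objects (`ctrl`, the widget's canvas, a preferences store). A self-contained sketch of the same enter/leave state machine, with hypothetical stand-ins so both branches can be exercised:

```python
class FakeCanvas:
    """Stand-in for the Talon canvas: tracks a rect and supports move()."""
    def __init__(self):
        self.x, self.y, self.width, self.height = 10.0, 20.0, 300.0, 200.0

    def get_rect(self):
        return self  # exposes .x/.y/.width/.height like a rect

    def move(self, x, y):
        self.x, self.y = x, y

class Widget:
    def __init__(self):
        self.id = "statusbar"      # hypothetical widget id
        self.setup_type = ""
        self.canvas = FakeCanvas()
        self.persisted = {}        # stand-in for preferences.persist_preferences

    def start_setup(self, setup_type):
        if self.setup_type != "" and not setup_type:
            # Leaving setup: persist the final geometry.
            self.setup_type = setup_type
            rect = self.canvas.get_rect()
            self.persisted.update({
                self.id + "_x": int(rect.x),
                self.id + "_y": int(rect.y),
                self.id + "_width": int(rect.width),
                self.id + "_height": int(rect.height),
            })
        elif self.setup_type != setup_type:
            # Entering (or switching) setup state.
            self.setup_type = setup_type
            if setup_type == "position":
                self.canvas.move(100, 150)  # stand-in for ctrl.mouse_pos()

w = Widget()
w.start_setup("position")  # canvas jumps to the (stubbed) mouse position
w.start_setup("")          # persists statusbar_x/_y/_width/_height
print(w.persisted)
```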
Extract bbox info from file name. | def get_bbox(fname):
fname = fname.split('_') # fname -> list
i = fname.index('bbox')
    return list(map(float, fname[i+1:i+5])) # m
| {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_bbox(fname):\r\n fname = fname.split('_') # fname -> list\r\n i = fname.index('bbox')\r\n return list(map(float, fname[i+1:i+5])) # m\r",
"def get_bbox(fname):\r\n\r\n fname = fname.split('_') # fname -> list\r\n i = fname.index('bbox')\r\n return list(map(float, fname[i+1:i+5])) # m\r",
"def bbox(self, filename: str) -> str:\n temp = \"/tmp/\" + os.path.basename(filename)\n im = cv2.imread(filename)\n # Draw rectangle for on-screen debugging.\n match = re.search(r\"l(\\d+)_r(\\d+)_t(\\d+)_b(\\d+)_w(\\d+)_h(\\d+)\", filename)\n l = int(match.group(1))\n r = int(match.group(2))\n t = int(match.group(3))\n b = int(match.group(4))\n w = int(match.group(5))\n h = int(match.group(6))\n assert w == r - l\n assert h == b - t\n cv2.rectangle(im, (l, t), (r, b), BLUE, 3)\n cv2.imwrite(temp, im)\n return temp",
"def format_bbox_file(self, img_name, data):\r\n\r\n with open(self.bboxes_local, 'w+') as fbbox:\r\n # remove path\r\n bboxes = data.split(' ')[1:]\r\n for i in range(0, len(bboxes), 4):\r\n cur_bbox = bboxes[i:i+4]\r\n fbbox.write(img_name + ' ' + ' '.join(cur_bbox) + '\\n')",
"def get_bbox_data(self):\r\n with open(self.bboxes_local, 'r') as fbbox:\r\n data = fbbox.read()\r\n\r\n return data",
"def test_get_bounding_box(self):\n\n # Note there are two possible correct values of bbox depending on\n # the version of gdal:\n # http://trac.osgeo.org/gdal/wiki/rfc33_gtiff_pixelispoint\n\n # Get gdal version number\n x = gdal.VersionInfo('').replace('dev', '').split()\n y = x[1].split('.')[:2]\n z = ''.join(y) # Turn into number and\n if z.endswith(','):\n z = z[:-1] # Remove trailing comma\n\n # Reference bbox for vector data\n ref_bbox = {'tsunami_building_exposure.shp': [150.15238387897742,\n -35.71084183517241,\n 150.18779267086208,\n -35.70131768155173]}\n\n # Select correct reference bbox for rasters\n if float(z) < 17:\n ref_bbox['Earthquake_Ground_Shaking_clip.tif'] = [99.3641696,\n -2.2031806,\n 102.2411696,\n -0.0041806]\n else:\n ref_bbox['Earthquake_Ground_Shaking_clip.tif'] = [99.36,\n -2.199,\n 102.237,\n 0.0]\n\n for filename in ['Earthquake_Ground_Shaking_clip.tif',\n 'tsunami_building_exposure.shp']:\n abspath = os.path.join(TESTDATA, filename)\n bbox = get_bounding_box(abspath)\n msg = ('Got bbox %s from filename %s, but expected %s '\n % (str(bbox), filename, str(ref_bbox[filename])))\n assert numpy.allclose(bbox, ref_bbox[filename]), msg\n\n # Check the conversions\n bbox_string = bboxlist2string(bbox)\n\n # Check the check :-)\n check_bbox_string(bbox_string)\n\n # Check that it works for layer objects instantiated from file\n L = read_layer(abspath)\n L_bbox = L.get_bounding_box()\n msg = ('Got bbox %s from filename %s, but expected %s '\n % (str(L_bbox), filename, str(ref_bbox[filename])))\n assert numpy.allclose(L_bbox, ref_bbox[filename]), msg\n\n # Check that it works for layer objects instantiated from data\n if L.is_raster:\n D = Raster(data=L.get_data(),\n projection=L.get_projection(),\n geotransform=L.get_geotransform())\n elif L.is_vector:\n D = Vector(data=L.get_data(),\n projection=L.get_projection(),\n geometry=L.get_geometry())\n else:\n msg = 'Unexpected layer object: %s' % str(L)\n raise RuntimeError(msg)\n\n # Check that get_bounding_box works for data instantiated layers\n D_bbox = D.get_bounding_box()\n msg = ('Got bbox %s from layer %s, but expected %s '\n % (str(D_bbox), str(D), str(L_bbox)))\n assert numpy.allclose(D_bbox, L_bbox), msg",
"def _populate_bbox_data(self, filename: str) -> None:\n if self.box_images.get(filename) is None:\n return []\n\n target = self.parse_voc_xml(ET_parse(self.box_targets[filename]).getroot())\n\n # TO-DO\n # The following function can also be used to output pose for each bbox\n bbox = self.get_objects(target)\n\n return bbox",
"def bbox_from_json(bbox_file):\n with open(bbox_file, 'r') as f:\n bbox = np.array(json.load(f)['bbox']).astype(np.float32)\n ul_corner = bbox[:2]\n center = ul_corner + 0.5 * bbox[2:]\n width = max(bbox[2], bbox[3])\n scale = width / 200.0\n # make sure the bounding box is rectangular\n return center, scale",
"def bbox_from_json(bbox_file):\n with open(bbox_file, 'r') as f:\n bbox = np.array(json.load(f)['bbox']).astype(np.float32)\n ul_corner = bbox[:2]\n center = ul_corner + 0.5 * bbox[2:]\n width = max(bbox[2], bbox[3])\n scale = width / 200.0\n # make sure the bounding box is rectangular\n return center, scale",
"def load_bbox(depth_dir, view):\n base_filename = os.path.join(depth_dir, \"%05d\" % view)\n if os.path.exists(base_filename + \".npz\"):\n npz_dict = np.load(base_filename + \".npz\")\n if 'bbox' in npz_dict:\n crop = npz_dict['bbox']\n else:\n crop = None\n else:\n crop = None\n if crop is None:\n crop_files = glob(base_filename + \"_bbox*\")\n if len(crop_files) == 1:\n crop = np.load(crop_files[0])\n elif len(crop_files) > 1:\n error(\"Crop file base '%s_bbox' matches multiple files\" % base_filename)\n return crop",
"def read_bounding_boxes(filename):\n f = open(filename)\n objects = []\n weight = 0\n height = 0\n for line in f:\n print(line)\n first_word = line.split(';')[0]\n if first_word == \"Dimensions\":\n weight = line.split(';')[1]\n height = line.split(';')[2]\n if first_word == \"Object\":\n objects.append((line.split(';')[1], line.split(';')[2], line.split(';')[4],\n line.split(';')[5], line.split(';')[6], line.split(';')[7]))\n return weight, height, objects",
"def getBoundingBox(filepath):\n datasource = ogr.Open(filepath)\n geo_dict = {}\n\n for layer in datasource:\n layer_name = layer.GetDescription()\n ext = layer.GetExtent()\n bbox = [ext[0], ext[2], ext[1], ext[3]]\n\n try:\n spatial_ref = layer.GetSpatialRef()\n spatial_ref.AutoIdentifyEPSG()\n crs = spatial_ref.GetAuthorityCode(None)\n except Exception as e:\n logger.debug(\"Error extracting EPSG CODE from layer {}: \\n {}\".format(layer_name, e))\n crs = None\n\n # Patch GDAL > 3.2 for GML https://github.com/OSGeo/gdal/issues/2195\n if int(osgeo.__version__[0]) >= 3 and int(osgeo.__version__[2]) < 2 and datasource.GetDriver().GetName() ==\"GML\":\n bbox = [ext[2], ext[0], ext[3], ext[1]]\n\n geo_dict[layer_name] = {\"bbox\": bbox, \"crs\": crs}\n\n if bbox == null_island or crs is None:\n logger.debug(\"Layer {} does not have identifiable geographic extent. CRS may be missing.\".format(layer_name))\n del geo_dict[layer_name][\"crs\"]\n\n bbox_merge = hf.bbox_merge(geo_dict, filepath)\n\n spatial_extent = None\n\n if bbox_merge is not None:\n if len(bbox_merge) != 0:\n spatial_extent = bbox_merge\n\n return spatial_extent",
"def load_bb(filename):\n in_data = gdal.Open(filename, 0)\n geotransform = in_data.GetGeoTransform()\n nx = in_data.RasterXSize\n ny = in_data.RasterYSize\n return geotransform2bb(geotransform, nx, ny)",
"def get_annotation_by_name(ImgName, df, default_size = (640,640)):\n ImgName = ImgName.split('.')[0] + '.jpg'\n bb_boxes = df[df['Frame'] == ImgName].reset_index()\n labels = np.zeros(len(bb_boxes))\n bbox = np.zeros((len(bb_boxes), 4))\n for i in range(len(bb_boxes)):\n #resize bbox to default size\n labels[i] = bb_boxes.iloc[i]['label']\n bbox[i,0] = bb_boxes.iloc[i]['center_x']\n bbox[i,1] = bb_boxes.iloc[i]['center_y']\n bbox[i,2] = bb_boxes.iloc[i]['w']\n bbox[i,3] = bb_boxes.iloc[i]['h']\n #print(bbox)\n #print(len(bb_boxes))\n return labels, bbox",
"def getBoundingBox(fileList):\n return IrgGeoFunctions.getImageBoundingBox(fileList[0])",
"def layer_bbox(m, names, proj_target, bbox=None):\n for layer in (l for l in m.layers if l.name in names):\n # it may as well be a GPX layer in WGS84\n layer_proj = mapnik.Projection(layer.srs)\n box_trans = mapnik.ProjTransform(layer_proj, proj_target)\n lbbox = box_trans.forward(layer.envelope())\n if bbox:\n bbox.expand_to_include(lbbox)\n else:\n bbox = lbbox\n return bbox",
"def find_bbox(pred_file_path: str, train_file_path: str) -> Dict:\n\n f_pred = open(pred_file_path, \"r\")\n pred_result = f_pred.readlines()\n f_pred.close()\n\n img_index = get_img_index(pred_result)\n\n img_names = get_image_names(train_file_path)\n\n if len(img_index) - 1 != len(img_names):\n return \"There is mismatch between the number of predictions and the number of images.\"\n\n # Create dictionary with the img name as the key and the bbox information as values.\n target_labels = [\"TableCaption\", \"TableBody\", \"TableFootnote\", \"Paragraph\", \"Table\"]\n result = {}\n for i, name in enumerate(img_names):\n key = name\n start = img_index[i] + 1\n end = img_index[i + 1]\n unfiltered_value = pred_result[start:end]\n filtered_value = [\n v for v in unfiltered_value if v.split(\":\")[0] in target_labels\n ]\n result[key] = filtered_value\n\n return result",
"def read_mesh_nodes_bbox(filename):\n nodes = read_mesh_nodes(filename)\n bbox = read_mesh_bbox(nodes=nodes)\n return nodes, bbox",
"def get_bbox(im_file, visualize=False):\n im = cv2.imread(im_file)\n non_black_pixels = im.any(axis=-1).nonzero() \n bbox = [min(non_black_pixels[1][:]), min(non_black_pixels[0][:]),\n max(non_black_pixels[1][:]), max(non_black_pixels[0][:])]\n if visualize:\n vis_bbox(im, bbox)\n plt.show()\n return bbox",
"def parseBoundaryField(fn):\n content = getFileContent(fn)\n if content is not None:\n return parseBoundaryContent(content)\n else:\n return None",
"def geoBoundsMetadata(filename,format=\"shapefile\"):\n if format==\"shapefile\":\n with fiona.open(filename, 'r') as c:\n bnd= c.bounds\n bnd=(bnd[0],bnd[2],bnd[3],bnd[1])\n return \"ENVELOPE{0}\".format(bnd)\n\n else:\n with rasterio.open(filename,'r') as c:\n bnd= c.bounds\n bnd=(bnd[0],bnd[2],bnd[3],bnd[1])\n return \"ENVELOPE{0}\".format(bnd)",
"def read_annotation_yolov5(bbox_path):\n\n # image_paths = get_lists_in_dir(rawImage_dir)\n\n dw = 1./(camera_resolution[0]) # 1 / image width\n dh = 1./(camera_resolution[1]) # 1 / image height\n\n # Read in bbox coordinate information from bbox_information.txt\n dimension_list = []\n with open(bbox_path, 'r') as annotation_file:\n content = annotation_file.read().splitlines()\n\n for n in content:\n # x = int(n.split()[0])+int(n.split()[2])/2\n # y = int(n.split()[1])+int(n.split()[3])/2\n # w = int(n.split()[2])\n # h = int(n.split()[3])\n #\n # x = x*dw\n # w = w*dw\n # y = y*dh\n # h = h*dh\n\n bb = n.split()\n w = int(bb[2])\n h = int(bb[3])\n\n start_x = int(bb[0])\n start_y = int(bb[1])\n\n center_x = start_x + w / 2\n center_y = start_y + h / 2\n\n x = center_x * dw\n y = center_y * dh\n w = w * dw\n h = h * dh\n \n dimension_list.append((x, y, w, h))\n\n return dimension_list",
"def bbox_img(img, bbox):\n if len(bbox) == 4:\n return img[bbox[1]:bbox[3], bbox[0]:bbox[2]]\n else:\n return img",
"def getbbox(self):\n pass",
"def bbox_coordinates(label_sitk):\n\n #Setting Bounding Box\n F_statistics = sitk.LabelShapeStatisticsImageFilter()\n\n F_statistics.Execute(label_sitk)\n bbox_dims = F_statistics.GetBoundingBox(1)\n\n spacer = 3\n xmin = bbox_dims[0]-spacer\n xmax = bbox_dims[1]+spacer\n ymin = bbox_dims[2]-spacer\n ymax = bbox_dims[3]+spacer\n zmin = bbox_dims[4]-spacer\n zmax = bbox_dims[5]+spacer\n\n p1 = [xmin-spacer, ymin, zmin]\n p2 = [xmin, ymin, zmax]\n p3 = [xmin, ymax, zmin]\n p4 = [xmin, ymax, zmax]\n p5 = [xmax, ymin, zmin]\n p6 = [xmax, ymin, zmax]\n p7 = [xmax, ymax, zmin]\n p8 = [xmax, ymax, zmax]\n bbox_pts = [p1, p2, p3, p4, p5, p6, p7, p8]\n\n return bbox_pts",
"def load_annotations(path, img_w, img_h):\n bboxes = []\n with open(path, 'r') as file:\n for row in file:\n _, xc , yc, w, h = row.split()\n xc = float(xc)*img_w\n yc = float(yc)*img_h\n w = float(w)*img_w\n h = float(h)*img_h\n bboxes.append([xc - w/2 , yc - h/2, xc + w/2 , yc + h/2])\n\n return bboxes",
"def process_image_bbox(image, bbox, labels, file_name):\n bounds, classes, scores = postprocessing(bbox, image)\n image_processed = annotate(image, bounds, classes, scores, labels)\n image_processed.save(file_name, 'png')\n return image_processed",
"def get_bboxes(self, image_path: str, img_pipeline=None):\n pass",
"def bbox(self):\n return np.array(self.path.get_extents()).ravel(order='F')",
"def get_bounding_box_from_xml_path(path: Path) -> np.ndarray:\n with open(path, mode=\"r\") as file:\n bs = BeautifulSoup(file, \"xml\")\n\n x_min = float(bs.bndbox.xmin.string)\n x_max = float(bs.bndbox.xmax.string)\n y_min = float(bs.bndbox.ymin.string)\n y_max = float(bs.bndbox.ymax.string)\n\n return np.array([x_min, y_min, x_max - x_min, y_max - y_min])"
] | [
"0.7414556",
"0.73649496",
"0.71413463",
"0.6696102",
"0.62867755",
"0.6165321",
"0.6104715",
"0.59432495",
"0.59432495",
"0.59264004",
"0.59072345",
"0.59054977",
"0.5792332",
"0.5791462",
"0.577605",
"0.56787",
"0.5672523",
"0.5659785",
"0.5647485",
"0.56438744",
"0.5634939",
"0.56093234",
"0.55983853",
"0.55979973",
"0.556724",
"0.55332816",
"0.5486267",
"0.54659766",
"0.54580766",
"0.54553646"
] | 0.7516535 | 0 |
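A usage sketch for `get_bbox`, assuming the underscore-delimited naming convention it expects; the filename below is made up:

```python
# "bbox" is followed by four numbers (west, east, south, north, in meters).
fname = "grid_epsg_3031_bbox_-600000_600000_-1400000_-200000_2010.h5"
parts = fname.split("_")
i = parts.index("bbox")
print(list(map(float, parts[i + 1:i + 5])))
# [-600000.0, 600000.0, -1400000.0, -200000.0]
```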
Extract EPSG number from file name. | def get_proj(fname):
fname = fname.split('_') # fname -> list
i = fname.index('epsg')
return fname[i+1] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_proj(fname):\r\n\r\n fname = fname.split('_') # fname -> list\r\n i = fname.index('epsg')\r\n return fname[i+1]",
"def _epsg(self):\n info = self._info['coordinateSystem']['wkt'].rsplit('\"EPSG\",', 1)[-1]\n return int(re.findall(r\"\\d+\", info)[0])",
"def get_version_filename(filename):\n return re.search(r'\\d+', filename).group(0)",
"def get_num_from_file(file_name):\n basename = file_name.partition('.')[0]\n first, second = basename.split('_')\n num = second.replace(\"genome\", '')\n num = num[1:]\n return int(num)",
"def filename_from(url):\n filename = url.split('/')[-1]\n return filename",
"def extract_filename(str):\n regex = r\"([0-9_-]+).jpg\"\n matches = re.search(regex, str)\n if matches:\n return matches.group(1)",
"def get_radius_from_grfile(grfile, default=0):\n match = re.findall('(\\d+)', grfile)\n if len(match) > 0 and str(grfile).endswith(str(match[-1]) + '.gr'):\n return int(match[-1])\n return default",
"def parseFilename(self, filename):\r\n match = self.filename_regex.match(filename)\r\n if match is None:\r\n # TODO?: Raise exception?\r\n '''print \"Filename\", filename, \"unrecognized!\"'''\r\n return None\r\n lat = int(match.group(2))\r\n lon = int(match.group(4))\r\n if match.group(1) == \"S\":\r\n lat = -lat\r\n if match.group(3) == \"W\":\r\n lon = -lon\r\n return lat, lon",
"def get_ens_num(file):\n f = 'ens' + '(\\d+)'\n match = re.search(f, file)\n if match:\n return int(match.group(1))",
"def parse_num(path):\n nbasename = path.basename.lower()\n if nbasename.startswith(nprefix):\n try:\n return int(nbasename[len(nprefix) :])\n except ValueError:\n pass",
"def get_filename(url: str) ->str:\n if 'drive.google.com' in url:\n return _extract_google_drive_file_id(url)\n url, filename = os.path.split(url)\n return filename or os.path.basename(url)",
"def extract_file_extension(url_file):\n pattern = re.split(\"\\.\",url_file)\n return pattern[-1]",
"def _filename_from_url(url):\n file_name = url.split(\"/\")[-1]\n return file_name",
"def filename(self):\n _, tail = os.path.split(self.url)\n return self.folder + '/' + tail[:-4] + '/' + tail[:-3] + 'shp'",
"def _get_file_name(url: str) -> str:\n url = url.strip('/')\n result = findall(r'/(\\w+\\.\\w+)[?|$]', url)\n if result:\n return result[-1]\n return url.split('/')[-1]",
"def get_extension_from_filename(filename):\n return filename[-4:]",
"def url_file_name(url):\r\n return url[url.rfind('/') + 1:]",
"def extract_id(file_path):\n # An example of file path is AlkEthOH_tripos/AlkEthOH_chain_filt1/AlkEthOH_c555.crd\n return os.path.splitext(os.path.basename(file_path))[0][9:]",
"def get_name_from_filename(filename):\n return filename[:-4]",
"def extract_filefamilyname( self, filename ):\n matchobject = re.search( r\"^.*_\\d\\d\", filename )\n if matchobject is None:\n return filename\n else:\n familyname = filename[0:(matchobject.end()-3)]\n return familyname",
"def filename(self,imgurl):\n if imgurl.find('/'):\n return imgurl.rsplit('/', 1)[1]",
"def get_name_from_file(filename):\n return filename.split(\".\")[0]",
"def filename(self):\n return os.path.basename(self._spatial_filename)",
"def get_imageId_from_fileName(filename, id_iter):\n filename = os.path.splitext(filename)[0]\n if filename.isdigit():\n return int(filename)\n return id_iter",
"def get_imageId_from_fileName(filename):\n filename = os.path.splitext(filename)[0]\n if filename.isdigit():\n return int(filename)\n return id_iter",
"def get_filename(self) -> str:\n fname = self.url.split(\"/\")[-1]\n if \",\" in fname:\n _fname, _i = fname.split(\",\")\n _split_fname = _fname.split(\".\")\n _name = _split_fname[0]\n _extension = _split_fname[-1]\n return _name + _i + \".\" + _extension\n else:\n return fname",
"def return_episode_num(name):\n return int(name.split(\".\")[0].split(\"ep_\")[1]) # Use split to return only the episode number needed to sort the files in increasing order",
"def filename_par(filename, searchstr):\n\tstart = filename.find(searchstr) + len(searchstr)\n\tfinish = start + 1\n\twhile unicode(filename[start:].replace(\".\",\"\"))[:finish-start].isnumeric():\n\t\tfinish += 1\n\treturn float(filename[start:finish-1])",
"def Get_epsg(g, extension = 'tiff'):\n try:\n if extension == 'tiff':\n # Get info of the dataset that is used for transforming\n try:\n dest = gdal.Open(g)\n except:\n dest = g\n g_proj = dest.GetProjection()\n Projection=g_proj.split('EPSG\",\"')\n epsg_to=int((str(Projection[-1]).split(']')[0])[0:-1])\n \n if extension == 'GEOGCS':\n Projection = g\n epsg_to=int((str(Projection).split('\"EPSG\",\"')[-1].split('\"')[0:-1])[0])\n\n except:\n epsg_to=4326\n #print 'Was not able to get the projection, so WGS84 is assumed'\n \n return(epsg_to)",
"def _get_aso_id_from_file_name(self, filename: str) -> str:\n id_parts = filename.split('/')\n prefix = id_parts[1]\n suffix = id_parts[-1].split('.')[0].zfill(3)\n if len(suffix) == 5:\n return suffix\n else:\n return prefix + suffix"
] | [
"0.63735604",
"0.62062746",
"0.6061637",
"0.58868164",
"0.5818382",
"0.5740342",
"0.5669474",
"0.5658069",
"0.5641101",
"0.5620341",
"0.5620254",
"0.56191593",
"0.56164837",
"0.56108224",
"0.5606117",
"0.55956405",
"0.5589816",
"0.558568",
"0.557769",
"0.5521931",
"0.55009794",
"0.5497475",
"0.5487516",
"0.5469965",
"0.5462271",
"0.54598093",
"0.54462576",
"0.5428249",
"0.5417183",
"0.5403374"
] | 0.6381265 | 1 |
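`get_proj` reads the same kind of filename, returning whatever token follows `epsg`; with a hypothetical name:

```python
fname = "grid_epsg_3031_bbox_-600000_600000_-1400000_-200000.h5"
parts = fname.split("_")
print(parts[parts.index("epsg") + 1])  # '3031'
```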
Return all 2d '/variable' names in the HDF5. | def get_grid_names(fname):
with h5py.File(fname, 'r') as f:
vnames = [k for k in f.keys() if f[k].ndim == 2]
return vnames | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def data_variable_names(self):\n data_names = []\n mesh = self.mesh_names()\n prefix = mesh[0]+'_'\n for vname in self.nc.variables.keys():\n if vname.startswith(prefix):\n if self.nc.dimensions.has_key(vname):\n continue\n if hasattr(self.nc.variables[vname],'cf_role'):\n continue\n data_names.append( vname[len(prefix):] )\n return data_names",
"def variable_names(self):\n \n return [x['variable'] for x in self.variable_dicts()]",
"def variables_used (self) :\r\n\t\t## These names do not contain dimension specification (everything in brackets\r\n\t\t## that comes after a name is am array index - either the arry was declared\r\n\t\t## correctly or it is wrong anyway, there is no implicit declaration of arrays) !\r\n\r\n\t\tresult = []\r\n\r\n\t\tfor l in self.equ_lists :\r\n\t\t\tfor var_name in l :\r\n\t\t\t\tresult.append(var_name[0])\r\n\t\treturn result",
"def get_variable_names(self):\n return [var[1] for var in self.variables]",
"def getOthVarNames( self ):\n\n if self.othVarNames:\n return self.othVarNames.keys()\n\n n = self.adb.get( \"nOthVars\" )\n for indx in range( n ):\n name = self.adb.get( \"othVarName\",\n indx ) \n self.othVarNames[ name ] = indx\n\n return self.othVarNames.keys()",
"def get_node_variable_names(self):\n return [b\"\".join(_i).strip().decode()\n for _i in self._f.variables[\"name_nod_var\"][:]]",
"def getOhcVarNames( self ):\n\n if self.ohcVarNames:\n return self.ohcVarNames.keys()\n \n n = self.adb.get( \"nOhcVars\" )\n for indx in xrange( n ):\n name = self.adb.get( \"ohcVarName\",\n indx ) \n self.ohcVarNames[name] = indx\n\n return self.ohcVarNames.keys()",
"def variables_used (self) :\r\n\t\t## These names possibly contain dimension specification!\r\n\t\treturn self.variable_names",
"def read_hdf5_group(filename, gname, vars_name=None):\n fid = h5py.File(filename, 'r')\n gid = fid.get(gname)\n if vars_name is None: vars_name = list(gid.keys())\n\n data = {}\n for var_name in vars_name:\n try:\n dset = gid.get(var_name)\n shape = dset.shape\n data[var_name] = np.zeros(shape)\n dset.read_direct(data[var_name])\n except:\n pass\n fid.close()\n print('Read from ', ''.join((filename,'/',gname)))\n print('Variables names = ')\n print('\\n'.join(vars_name))\n\n return data, vars_name",
"def getOeiVarNames( self ):\n\n if self.oeiVarNames:\n return self.oeiVarNames.keys()\n\n n = self.adb.get( \"nOeiVars\" )\n for indx in xrange( n ):\n name = self.adb.get( \"oeiVarName\",\n indx ) \n self.oeiVarNames[name] = indx\n\n return self.oeiVarNames.keys()",
"def variables(model: Model) -> AbstractSet[str]:\r\n assert is_model(model)\r\n return model.keys()",
"def getVariableList(dataset):\n variables = [v for v in dataset.variables.keys() if v not in dataset.dimensions.keys()]\n for d in dataset.dimensions.keys():\n try:\n variables.pop(variables.index(dataset.variables[d].getncattr(\"bounds\")))\n except:\n pass\n return variables",
"def variables(model: Model) -> AbstractSet[str]:\n assert is_model(model)\n return model.keys()",
"def variables(self):\n return [i.name for i in self.inputs + self.outputs]",
"def get_element_variable_names(self):\n return [b\"\".join(_i).strip().decode()\n for _i in self._f.variables[\"name_elem_var\"][:]]",
"def get_variable_names(self):\n return [VariableString(s) for s in\n self._design.GetVariables()+self._design.GetPostProcessingVariables()]",
"def variables(self):\n return self.dataset.data_vars",
"def getLinIterVarNames( self ):\n\n self.updateAdb( )\n\n return self.iterNames.keys()",
"def vars(self):\n return [Var(i,self.dims[i]) for i in range(self.nvar)] # TODO: use stored state info (=1 sometimes)",
"def getOfcVarNames( self ):\n\n if self.ofcVarNames:\n return self.ofcVarNames.keys()\n \n n = self.adb.get( \"nOfcVars\" )\n for indx in xrange( n ):\n name = self.adb.get( \"ofcVarName\",\n indx ) \n self.ofcVarNames[name] = indx\n\n return self.ofcVarNames.keys()",
"def get_layer_var_names(self):\n return(self.params)",
"def get_all_variables_names(self):\n return self.project.get_variable_names() + self.design.get_variable_names()",
"def getResRatioVarNames( self ):\n\n self.updateAdb( )\n\n return self.resNames.keys()",
"def getOriVarNames( self ):\n\n if self.oriVarNames:\n return self.oriVarNames.keys()\n\n n = self.adb.get( \"nOriVars\" )\n for indx in xrange( n ):\n name = self.adb.get( \"oriVarName\",\n indx ) \n self.oriVarNames[name] = indx\n\n return self.oriVarNames.keys()",
"def getDataVariableNames(self, product):\r\n return []",
"def getDataVariableNames(self, product):\r\n\r\n h = product.getSceneRasterHeight()\r\n\r\n # 10m resolution\r\n if h == 10980:\r\n return self.return_available_variables(product, DATA_VARIABLE_NAMES_10m)\r\n\r\n # 20m resolution\r\n elif h == 5490:\r\n return self.return_available_variables(product, DATA_VARIABLE_NAMES_20m)\r\n\r\n # 20m resolution\r\n elif h == 1830:\r\n return self.return_available_variables(product, DATA_VARIABLE_NAMES_60m)",
"def getSolRatioVarNames( self ):\n\n self.updateAdb( )\n\n return self.solNames.keys()",
"def get_variable_names(filepath):\n variables = set()\n with open(filepath, \"r\") as f:\n previous = \"\"\n for line in f.readlines():\n if line[0] == \"#\":\n previous = line\n var_names = None\n continue\n if var_names is not None:\n continue\n var_names = previous.split()[1:]\n while \"vs\" in var_names:\n var_names.remove(\"vs\")\n for name in var_names:\n variables.add(name)\n return list(variables)",
"def get_variable_attributes(model_data, header, variable, variable_name):\n header.append('# {}_column: {}\\n'.format(variable, variable_name))\n for attr, value in vars(model_data.variables[variable]).items():\n if '_range' in attr:\n header.append('# {}_{}: {},{}\\n'.format(variable, attr, value[0], value[1]))\n else:\n header.append('# {}_{}: {}\\n'.format(variable, attr, value))\n return header",
"def get_variables(self) -> np.array:\n pass"
] | [
"0.6690074",
"0.62676114",
"0.61328524",
"0.60728717",
"0.6032376",
"0.5962701",
"0.5961076",
"0.5934028",
"0.5920969",
"0.5817062",
"0.57885367",
"0.57675433",
"0.57535744",
"0.5749644",
"0.5723075",
"0.57072866",
"0.56532377",
"0.56390136",
"0.5622561",
"0.56167597",
"0.5583233",
"0.55704904",
"0.5569377",
"0.55612564",
"0.55557597",
"0.55527127",
"0.5549425",
"0.55294627",
"0.5500263",
"0.5489356"
] | 0.6843741 | 0 |
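A runnable sketch of `get_grid_names` against a throwaway file (the dataset names are made up); only datasets with `ndim == 2` survive the filter:

```python
import h5py
import numpy as np

with h5py.File("demo.h5", "w") as f:
    f["x"] = np.arange(5)         # 1-D, filtered out
    f["dhdt"] = np.zeros((4, 3))  # 2-D grid
    f["rmse"] = np.ones((4, 3))   # 2-D grid

with h5py.File("demo.h5", "r") as f:
    print([k for k in f.keys() if f[k].ndim == 2])  # ['dhdt', 'rmse']
```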
Test that initializing a Matern1/2 kernel with 0 lengthscale raises an exception | def test_matern_zero_lengthscale(matern):
with pytest.raises(ValueError) as exp:
matern(lengthscale=0.0, variance=1.0, output_dim=1)
assert exp.value.args[0].find("lengthscale must be positive.") >= 0 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def testZeroInput(self):\n nb.rescale_length(2.0)\n nb.rescale_length(0)\n self.assertEqual(2.0, nb.rscale)",
"def testZeroInput(self):\n self.assertRaises(TypeError, nb.rscale,)",
"def testKernelsNotSpecified(self):\n with self.assertRaisesRegexp(ValueError, \"`kernel_shape` cannot be None.\"):\n snt.Conv1DTranspose(output_channels=1)",
"def test_nonpositive_nu_raises_exception(nu):\n with pytest.raises(ValueError):\n kernels.Matern(input_dim=1, nu=nu)",
"def testKernelsNotSpecified(self):\n with self.assertRaisesRegexp(ValueError, \"`kernel_shape` cannot be None.\"):\n snt.Conv2DTranspose(output_channels=1)",
"def test_gauss_kernel():\n\n gauss = gauss_kernel(2, 5)\n\n assert gauss.shape == (5, 5)\n assert gauss[2, 2] == 0.039788735772973836",
"def init_kernel(cls, m):\n pass",
"def testKernelShape(self):\n\n snt.Conv3D(output_channels=10, kernel_shape=[3, 4, 5], name=\"conv1\")\n snt.Conv3D(output_channels=10, kernel_shape=3, name=\"conv1\")\n\n with self.assertRaisesRegexp(snt.Error, \"Invalid kernel shape.*\"):\n snt.Conv3D(output_channels=10, kernel_shape=[3, 3], name=\"conv1\")\n snt.Conv3D(output_channels=10, kernel_shape=[3, 3, 3, 3], name=\"conv1\")",
"def testKernelShape(self, use_bias):\n\n snt.Conv1D(output_channels=10, kernel_shape=[3], name=\"conv1\",\n use_bias=use_bias)\n snt.Conv1D(output_channels=10, kernel_shape=3, name=\"conv1\",\n use_bias=use_bias)\n\n err = \"Invalid kernel shape\"\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, err):\n snt.Conv1D(output_channels=10, kernel_shape=[3, 3], name=\"conv1\")",
"def testNoFeatureColumnsOrKernelMappers(self):\n with self.assertRaises(ValueError):\n _ = kernel_estimators.KernelLinearClassifier()",
"def testKernelShape(self, use_bias):\n\n snt.Conv2D(output_channels=10, kernel_shape=[3, 4], name=\"conv1\",\n use_bias=use_bias)\n snt.Conv2D(output_channels=10, kernel_shape=3, name=\"conv1\",\n use_bias=use_bias)\n\n err = \"Invalid kernel shape\"\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, err):\n snt.Conv2D(output_channels=10,\n kernel_shape=[3, 3, 3],\n name=\"conv1\")",
"def test_fixedkernel(self):\r\n X = np.random.rand(30, 4)\r\n K = np.dot(X, X.T)\r\n kernel = GPy.kern.fixed(4, K)\r\n kern = GPy.kern.poly(5, degree=4)\r\n self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))",
"def testKernelShape(self, use_bias):\n\n # No check against output_channels is done yet (needs input size).\n snt.SeparableConv2D(\n output_channels=1,\n channel_multiplier=2,\n kernel_shape=[3, 4],\n name=\"conv1\",\n use_bias=use_bias)\n snt.SeparableConv2D(\n output_channels=1, channel_multiplier=1, kernel_shape=3, name=\"conv1\")\n\n error_msg = (r\"Invalid kernel shape: x is \\[3], must be either a positive\"\n r\" integer or an iterable of positive integers of size 2\")\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, error_msg):\n snt.SeparableConv2D(output_channels=1,\n channel_multiplier=3,\n kernel_shape=[3],\n use_bias=use_bias)",
"def testKernelShape(self, use_bias):\n\n # No check against output_channels is done yet (needs input size).\n snt.SeparableConv1D(\n output_channels=1,\n channel_multiplier=2,\n kernel_shape=[3],\n name=\"conv1\",\n use_bias=use_bias)\n snt.SeparableConv1D(\n output_channels=1, channel_multiplier=1, kernel_shape=3, name=\"conv1\")\n\n error_msg = (r\"Invalid kernel shape: x is \\[3, 3\\], must be either a \"\n r\"positive integer or an iterable of positive integers of \"\n r\"size 1\")\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, error_msg):\n snt.SeparableConv1D(output_channels=1,\n channel_multiplier=3,\n kernel_shape=[3, 3],\n use_bias=use_bias)",
"def test_nu_large_recovers_rbf_kernel(x0: np.ndarray, x1: np.ndarray, input_dim: int):\n lengthscale = 1.25\n kernmat_rbf = kernels.ExpQuad(lengthscale=lengthscale, input_dim=input_dim)\n kernmat_matern = kernels.Matern(lengthscale=lengthscale, nu=15, input_dim=input_dim)\n np.testing.assert_allclose(\n kernmat_rbf(x0, x1),\n kernmat_matern(x0, x1),\n err_msg=\"RBF and Matern kernel are not equivalent for nu=infty.\",\n rtol=0.05,\n atol=0.01,\n )",
"def testInvalidKernelMapper(self):\n\n class DummyKernelMapper(object):\n\n def __init__(self):\n pass\n\n feature = layers.real_valued_column('feature')\n kernel_mappers = {feature: [DummyKernelMapper()]}\n with self.assertRaises(ValueError):\n _ = kernel_estimators.KernelLinearClassifier(\n feature_columns=[feature], kernel_mappers=kernel_mappers)",
"def __init__(self, dim): #, length_scale, length_scale_bounds=()):\n# assert isinstance(column, (list, tuple, int)), \"must be int or list of ints\"\n# self.column = [column] if isinstance(column, int) else column\n# assert all(isinstance(i, int) for i in self.column), \"must be integers\"\n self.dim = dim\n \n kernels = [Projection([c]) for c in range(dim)]\n\n # combine the kernels into a single product kernel\n self.kernel = reduce(lambda k0, k1 : k0 * k1, kernels)",
"def testNegativeInput(self):\n nb.rescale_length(2.0)\n nb.rescale_length(-1.0)\n self.assertEqual(2.0, nb.rscale)",
"def testKernelShape(self, use_bias):\n\n snt.DepthwiseConv2D(channel_multiplier=1, kernel_shape=[3, 4])\n snt.DepthwiseConv2D(channel_multiplier=1, kernel_shape=3)\n error_msg = (r\"Invalid kernel shape: x is \\[3], must be either a positive\"\n r\" integer or an iterable of positive integers of size 2\")\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, error_msg):\n snt.DepthwiseConv2D(channel_multiplier=1, kernel_shape=[3],\n use_bias=use_bias, name=\"conv1\")",
"def __init__(self,\n batch_size,\n max_num_context,\n x_size=1,\n y_size=1,\n l1_scale=0.6,\n sigma_scale=1.0,\n random_kernel_parameters=True,\n kernel = 'SE', #valid options {SE,PER}\n testing=False):\n self._batch_size = batch_size\n self._max_num_context = max_num_context\n self._x_size = x_size\n self._y_size = y_size\n self._l1_scale = l1_scale\n self._sigma_scale = sigma_scale\n self._random_kernel_parameters = random_kernel_parameters\n self._testing = testing\n self._kernel = kernel",
"def test_conv2d_out_of_range_scale():\n np.random.seed(0)\n\n input_sc = 1024\n kernel_sc = 1024\n output_sc = 1\n\n model, _ = _get_model(\n (1, 4, 4, 4),\n 1,\n 1,\n 0,\n input_sc,\n 0,\n kernel_sc,\n 0,\n output_sc,\n \"none\",\n (1, 1),\n (1, 1),\n 1,\n \"uint8\",\n 8,\n \"HWIO\",\n )\n model = tei.make_ethosn_composite(model, \"ethos-n.qnn_conv2d\")\n mod = tei.make_ethosn_partition(model)\n\n expected_err_msg = (\n \"Overall scale (of the input * weights / output) should be in the range (2^-32, 65536)\"\n )\n tei.test_error(mod, {}, expected_err_msg)",
"def __init__(self, kernel_size):\r\n super().__init__()\r\n self.kernel_size = kernel_size",
"def __init__(self, kernel_size):\r\n super().__init__()\r\n self.kernel_size = kernel_size",
"def init():\n\tN = np.int32(DIM) #prepare for stitching\n\t#HII_DIM = np.int32(HII_DIM)\n\tf_pixel_factor = DIM/HII_DIM;\n\tscale = np.float32(BOX_LEN)/DIM\n\tHII_scale = np.float32(BOX_LEN)/HII_DIM\n\tshape = (N,N,N)\n\t\n\tMRGgen = MRG32k3aRandomNumberGenerator(seed_getter=seed_getter_uniform, offset=0)\n\n\tkernel_source = open(cmd_folder+\"/initialize.cu\").read()\n\tkernel_code = kernel_source % {\n\n\t\t'DELTAK': DELTA_K,\n\t\t'VOLUME': VOLUME,\n\t\t'DIM': DIM\n\t}\n\tmain_module = nvcc.SourceModule(kernel_code)\n\tinit_kernel = main_module.get_function(\"init_kernel\")\n\tHII_filter = main_module.get_function(\"HII_filter\")\n\tadj_complex_conj = main_module.get_function(\"adj_complex_conj\")\n\tsubsample_kernel = main_module.get_function(\"subsample\")\n\tvelocity_kernel = main_module.get_function(\"set_velocity\")\n\tpspec_texture = main_module.get_texref(\"pspec\")\n\n\tinterpPspec, interpSize = init_pspec() #interpPspec contains both k array and P array\n\tinterp_cu = cuda.matrix_to_array(interpPspec, order='F')\n\tcuda.bind_array_to_texref(interp_cu, pspec_texture)\n\n\tlargebox_d = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d, np.int32(DIM), block=block_size, grid=grid_size)\n\n\t#import IPython; IPython.embed()\n\tlargebox_d_imag = gpuarray.zeros(shape, dtype=np.float32)\n\tinit_kernel(largebox_d_imag, np.int32(DIM), block=block_size, grid=grid_size)\n\n\tlargebox_d *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d_imag *= MRGgen.gen_normal(shape, dtype=np.float32)\n\tlargebox_d = largebox_d + np.complex64(1.j) * largebox_d_imag\n\n\t#adj_complex_conj(largebox_d, DIM, block=block_size, grid=grid_size)\n\tlargebox = largebox_d.get()\n\t#np.save(parent_folder+\"/Boxes/deltak_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox)\n\n\t#save real space box before smoothing\n\tplan = Plan(shape, dtype=np.complex64)\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tnp.save(parent_folder+\"/Boxes/deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(DIM, BOX_LEN), largebox_d.real.get_async())\n\n\t#save real space box after smoothing and subsampling\n\t# host largebox is still in k space, no need to reload from disk\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tHII_filter(largebox_d, N, ZERO, smoothR, block=block_size, grid=grid_size);\n\tplan.execute(largebox_d, inverse=True) #FFT to real space of smoothed box\n\tlargebox_d /= scale**3\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tsubsample_kernel(largebox_d.real, smallbox_d, N, HII_DIM, PIXEL_FACTOR, block=block_size, grid=HII_grid_size) #subsample in real space\n\tnp.save(parent_folder+\"/Boxes/smoothed_deltax_z0.00_{0:d}_{1:.0f}Mpc\".format(HII_DIM, BOX_LEN), smallbox_d.get_async())\n\n\t# reload the k-space box for velocity boxes\n\tlargebox_d = gpuarray.to_gpu(largebox)\n\t\n\t#largebox_d /= VOLUME #divide by VOLUME if using fft (vs ifft)\n\tsmoothR = np.float32(L_FACTOR*BOX_LEN/HII_DIM)\n\tlargevbox_d = gpuarray.zeros((DIM,DIM,DIM), dtype=np.complex64)\n\tsmallbox_d = gpuarray.zeros(HII_shape, dtype=np.float32)\n\tfor num, mode in enumerate(['x', 'y', 'z']):\n\t\tvelocity_kernel(largebox_d, largevbox_d, DIM, np.int32(num), block=block_size, grid=grid_size)\n\t\tHII_filter(largevbox_d, DIM, ZERO, smoothR, block=block_size, grid=grid_size)\n\t\tplan.execute(largevbox_d, inverse=True)\n\t\tlargevbox_d /= scale**3\n\t\t#import IPython; IPython.embed()\n\t\tsubsample_kernel(largevbox_d.real, 
smallbox_d, DIM, HII_DIM,PIXEL_FACTOR, block=block_size, grid=HII_grid_size)\n\t\tnp.save(parent_folder+\"/Boxes/v{0}overddot_{1:d}_{2:.0f}Mpc\".format(mode, HII_DIM, BOX_LEN), smallbox_d.get())\n\n\treturn",
"def test_first_level_with_no_signal_scaling():\n shapes, rk = [(3, 1, 1, 2)], 1\n fmri_data = list()\n design_matrices = list()\n design_matrices.append(pd.DataFrame(np.ones((shapes[0][-1], rk)),\n columns=list(\n 'abcdefghijklmnopqrstuvwxyz')[:rk])\n )\n # Check error with invalid signal_scaling values\n with pytest.raises(ValueError,\n match=\"signal_scaling must be\"):\n FirstLevelModel(mask_img=False, noise_model='ols',\n signal_scaling=\"foo\")\n\n first_level = FirstLevelModel(mask_img=False, noise_model='ols',\n signal_scaling=False)\n fmri_data.append(Nifti1Image(np.zeros((1, 1, 1, 2)) + 6, np.eye(4)))\n\n first_level.fit(fmri_data, design_matrices=design_matrices)\n # trivial test of signal_scaling value\n assert first_level.signal_scaling is False\n # assert that our design matrix has one constant\n assert first_level.design_matrices_[0].equals(\n pd.DataFrame([1.0, 1.0], columns=['a']))\n # assert that we only have one theta as there is only on voxel in our image\n assert first_level.results_[0][0].theta.shape == (1, 1)\n # assert that the theta is equal to the one voxel value\n assert_almost_equal(first_level.results_[0][0].theta[0, 0], 6.0, 2)",
"def test_kernel_matrix(kernel, sample):\n sample = [ele for ele in sample] # consumed several times\n\n potato = KernelMethod(kernel)\n mat = potato.matrix(sample)\n assert np.all(np.linalg.eigvals(mat) > 0) or np.isclose(\n [np.min(np.linalg.eigvals(mat))], [0]\n )",
"def testMaskErrorIncompatibleRank2(self):\n\n np_mask = np.ones((3, 3))\n x = tf.constant(0.0, shape=(2, 8, 6))\n\n # Test with both numpy arrays and Tensors.\n for mask in (np_mask, tf.convert_to_tensor(np_mask)):\n with self.assertRaises(snt.Error) as cm:\n snt.Conv1D(output_channels=4, kernel_shape=5, mask=mask)(x)\n self.assertTrue(str(cm.exception).startswith(\n \"Invalid mask shape: {}\".format(np_mask.shape)))",
"def kernel_test(slabs, data, backend):\n Q = data[:, 0]\n\n layers = []\n for thickness, rsld, isld, sigma in slabs:\n layers.append(\n model.Layer(\n b=(rsld - 1j * isld), dens=0.1, d=thickness, sigma=sigma\n )\n )\n layers.reverse()\n stack = model.Stack(Layers=list(layers[1:-1]), Repetitions=1)\n sample = model.Sample(\n Stacks=[stack], Ambient=layers[-1], Substrate=layers[0]\n )\n # print(sample)\n\n inst = model.Instrument(\n probe=backend,\n wavelength=1.54,\n coords=\"q\",\n I0=1,\n res=0,\n restype=\"no conv\",\n respoints=5,\n resintrange=2,\n beamw=0.1,\n footype=\"no corr\",\n samplelen=10,\n pol=\"uu\",\n )\n if data.shape[1] == 4:\n dQ = data[:, 3]\n inst.restype = \"full conv and varying res.\"\n inst.res = dQ\n if backend == \"neutron pol spin flip\":\n # memory issues in matrix formalism if too many data points\n inst.respoints = 101\n else:\n inst.respoints = (\n 10001 # try to use same convolution as ref1d when generating\n )\n inst.resintrange = 3.5\n\n # print(inst)\n R = sample.SimSpecular(Q, inst)\n\n assert R.shape == data[:, 1].shape\n if data.shape[1] == 4:\n # validation accuracy is reduced for resolution runs, as strongly\n # depends on numerical convolution scheme\n if backend == \"neutron pol spin flip\":\n np.testing.assert_allclose(R, data[:, 1], rtol=0.005)\n else:\n np.testing.assert_allclose(R, data[:, 1], rtol=0.001)\n else:\n np.testing.assert_allclose(R, data[:, 1], rtol=0.001)",
"def test_MKDADensity_kernel_instance_with_kwargs(testdata_cbma):\n kern = MKDAKernel(r=2)\n meta = MKDADensity(kern, kernel__r=6, null_method=\"montecarlo\", n_iters=10)\n\n assert meta.kernel_transformer.get_params().get(\"r\") == 2",
"def __init__(self, columns): #, length_scale, length_scale_bounds=()):\n# assert isinstance(column, (list, tuple, int)), \"must be int or list of ints\"\n# self.column = [column] if isinstance(column, int) else column\n# assert all(isinstance(i, int) for i in self.column), \"must be integers\"\n self.columns = columns \n\n kernels = [Projection([c]) for c in columns]\n #factor_name(c)) for c in columns]\n \n # collect all the kernels to be combined into a single product kernel\n super(SimpleFactorKernel, self).__init__(kernels)"
] | [
"0.6572343",
"0.6536193",
"0.63661206",
"0.6360225",
"0.6295031",
"0.6194598",
"0.6051746",
"0.5993452",
"0.59858924",
"0.59765357",
"0.59129274",
"0.5900474",
"0.5889973",
"0.58620816",
"0.5820467",
"0.58139074",
"0.5799146",
"0.5781969",
"0.57249826",
"0.57009274",
"0.5688538",
"0.5544956",
"0.5544956",
"0.5529479",
"0.55000484",
"0.54913867",
"0.5491348",
"0.54885775",
"0.54483104",
"0.544471"
] | 0.6592899 | 0 |
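For context, a minimal constructor sketch (not any particular library's implementation) showing the argument validation that this zero-lengthscale test, and the zero-variance test that follows, both assume:

```python
import pytest

class Matern12:
    """Toy Matern-1/2 kernel holding only validated hyperparameters."""
    def __init__(self, lengthscale, variance, output_dim):
        if lengthscale <= 0.0:
            raise ValueError("lengthscale must be positive.")
        if variance <= 0.0:
            raise ValueError("variance must be positive.")
        self.lengthscale, self.variance, self.output_dim = lengthscale, variance, output_dim

with pytest.raises(ValueError):
    Matern12(lengthscale=0.0, variance=1.0, output_dim=1)  # zero lengthscale rejected
with pytest.raises(ValueError):
    Matern12(lengthscale=1.0, variance=0.0, output_dim=1)  # zero variance rejected
```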
Test that initializing a Matern1/2 kernel with 0 variance raises an exception | def test_matern12_zero_variance(matern):
with pytest.raises(ValueError) as exp:
matern(lengthscale=1.0, variance=0.0, output_dim=1)
assert exp.value.args[0].find("variance must be positive.") >= 0 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_gauss_kernel():\n\n gauss = gauss_kernel(2, 5)\n\n assert gauss.shape == (5, 5)\n assert gauss[2, 2] == 0.039788735772973836",
"def test_nonpositive_nu_raises_exception(nu):\n with pytest.raises(ValueError):\n kernels.Matern(input_dim=1, nu=nu)",
"def testKernelsNotSpecified(self):\n with self.assertRaisesRegexp(ValueError, \"`kernel_shape` cannot be None.\"):\n snt.Conv1DTranspose(output_channels=1)",
"def testKernelsNotSpecified(self):\n with self.assertRaisesRegexp(ValueError, \"`kernel_shape` cannot be None.\"):\n snt.Conv2DTranspose(output_channels=1)",
"def testKernelShape(self, use_bias):\n\n snt.Conv1D(output_channels=10, kernel_shape=[3], name=\"conv1\",\n use_bias=use_bias)\n snt.Conv1D(output_channels=10, kernel_shape=3, name=\"conv1\",\n use_bias=use_bias)\n\n err = \"Invalid kernel shape\"\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, err):\n snt.Conv1D(output_channels=10, kernel_shape=[3, 3], name=\"conv1\")",
"def testKernelShape(self, use_bias):\n\n snt.Conv2D(output_channels=10, kernel_shape=[3, 4], name=\"conv1\",\n use_bias=use_bias)\n snt.Conv2D(output_channels=10, kernel_shape=3, name=\"conv1\",\n use_bias=use_bias)\n\n err = \"Invalid kernel shape\"\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, err):\n snt.Conv2D(output_channels=10,\n kernel_shape=[3, 3, 3],\n name=\"conv1\")",
"def testBiasInitializerIsZeroByDefault(self):\n\n conv1 = snt.Conv3D(\n output_channels=5,\n kernel_shape=3,\n stride=1)\n\n conv1(tf.placeholder(tf.float32, [5, 10, 10, 10, 7]))\n\n with self.test_session():\n tf.variables_initializer([conv1.w, conv1.b]).run()\n\n self.assertAllClose(\n conv1.b.eval(),\n np.zeros([5], dtype=np.float32))",
"def testNoFeatureColumnsOrKernelMappers(self):\n with self.assertRaises(ValueError):\n _ = kernel_estimators.KernelLinearClassifier()",
"def testKernelShape(self, use_bias):\n\n # No check against output_channels is done yet (needs input size).\n snt.SeparableConv1D(\n output_channels=1,\n channel_multiplier=2,\n kernel_shape=[3],\n name=\"conv1\",\n use_bias=use_bias)\n snt.SeparableConv1D(\n output_channels=1, channel_multiplier=1, kernel_shape=3, name=\"conv1\")\n\n error_msg = (r\"Invalid kernel shape: x is \\[3, 3\\], must be either a \"\n r\"positive integer or an iterable of positive integers of \"\n r\"size 1\")\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, error_msg):\n snt.SeparableConv1D(output_channels=1,\n channel_multiplier=3,\n kernel_shape=[3, 3],\n use_bias=use_bias)",
"def testVariablesWithAndWithoutKernels(self):\n multi_dim_feature = layers.real_valued_column(\n 'multi_dim_feature', dimension=2)\n\n linear_classifier = kernel_estimators.KernelLinearClassifier(\n feature_columns=[multi_dim_feature])\n linear_classifier.fit(\n input_fn=_linearly_inseparable_binary_input_fn, steps=50)\n linear_variables = linear_classifier.get_variable_names()\n self.assertIn('linear/multi_dim_feature/weight', linear_variables)\n self.assertIn('linear/bias_weight', linear_variables)\n linear_weights = linear_classifier.get_variable_value(\n 'linear/multi_dim_feature/weight')\n linear_bias = linear_classifier.get_variable_value('linear/bias_weight')\n\n kernel_mappers = {\n multi_dim_feature: [RandomFourierFeatureMapper(2, 30, 0.6, 1, 'rffm')]\n }\n kernel_linear_classifier = kernel_estimators.KernelLinearClassifier(\n feature_columns=[], kernel_mappers=kernel_mappers)\n kernel_linear_classifier.fit(\n input_fn=_linearly_inseparable_binary_input_fn, steps=50)\n kernel_linear_variables = kernel_linear_classifier.get_variable_names()\n self.assertIn('linear/multi_dim_feature_MAPPED/weight',\n kernel_linear_variables)\n self.assertIn('linear/bias_weight', kernel_linear_variables)\n kernel_linear_weights = kernel_linear_classifier.get_variable_value(\n 'linear/multi_dim_feature_MAPPED/weight')\n kernel_linear_bias = kernel_linear_classifier.get_variable_value(\n 'linear/bias_weight')\n\n # The feature column used for linear classification (no kernels) has\n # dimension 2 so the model will learn a 2-dimension weights vector (and a\n # scalar for the bias). In the kernelized model, the features are mapped to\n # a 30-dimensional feature space and so the weights variable will also have\n # dimension 30.\n self.assertEqual(2, len(linear_weights))\n self.assertEqual(1, len(linear_bias))\n self.assertEqual(30, len(kernel_linear_weights))\n self.assertEqual(1, len(kernel_linear_bias))",
"def testKernelShape(self, use_bias):\n\n # No check against output_channels is done yet (needs input size).\n snt.SeparableConv2D(\n output_channels=1,\n channel_multiplier=2,\n kernel_shape=[3, 4],\n name=\"conv1\",\n use_bias=use_bias)\n snt.SeparableConv2D(\n output_channels=1, channel_multiplier=1, kernel_shape=3, name=\"conv1\")\n\n error_msg = (r\"Invalid kernel shape: x is \\[3], must be either a positive\"\n r\" integer or an iterable of positive integers of size 2\")\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, error_msg):\n snt.SeparableConv2D(output_channels=1,\n channel_multiplier=3,\n kernel_shape=[3],\n use_bias=use_bias)",
"def testKernelShape(self):\n\n snt.Conv3D(output_channels=10, kernel_shape=[3, 4, 5], name=\"conv1\")\n snt.Conv3D(output_channels=10, kernel_shape=3, name=\"conv1\")\n\n with self.assertRaisesRegexp(snt.Error, \"Invalid kernel shape.*\"):\n snt.Conv3D(output_channels=10, kernel_shape=[3, 3], name=\"conv1\")\n snt.Conv3D(output_channels=10, kernel_shape=[3, 3, 3, 3], name=\"conv1\")",
"def testKernelShape(self, use_bias):\n\n snt.DepthwiseConv2D(channel_multiplier=1, kernel_shape=[3, 4])\n snt.DepthwiseConv2D(channel_multiplier=1, kernel_shape=3)\n error_msg = (r\"Invalid kernel shape: x is \\[3], must be either a positive\"\n r\" integer or an iterable of positive integers of size 2\")\n with self.assertRaisesRegexp(snt.IncompatibleShapeError, error_msg):\n snt.DepthwiseConv2D(channel_multiplier=1, kernel_shape=[3],\n use_bias=use_bias, name=\"conv1\")",
"def testZeroInput(self):\n self.assertRaises(TypeError, nb.rscale,)",
"def test_kernel_matrix(kernel, sample):\n sample = [ele for ele in sample] # consumed several times\n\n potato = KernelMethod(kernel)\n mat = potato.matrix(sample)\n assert np.all(np.linalg.eigvals(mat) > 0) or np.isclose(\n [np.min(np.linalg.eigvals(mat))], [0]\n )",
"def test_param_cov_with_uncertainties(self, fitter):\n fitter = fitter()\n\n a = 2\n b = 100\n\n with NumpyRNGContext(_RANDOM_SEED):\n x = np.linspace(0, 1, 100)\n # y scatter is amplitude ~1 to make sure covariance is\n # non-negligible\n y = x * a + b + np.random.normal(size=len(x))\n sigma = np.random.normal(loc=1, scale=0.1, size=len(x))\n\n # compute the ordinary least squares covariance matrix\n # accounting for measurement uncertainties `sigma`\n X = np.vstack([x, np.ones(len(x))]).T\n inv_N = np.linalg.inv(np.diag(sigma) ** 2)\n cov = np.linalg.inv(X.T @ inv_N @ X)\n beta = cov @ X.T @ inv_N @ y.T\n\n # now do the non-linear least squares fit\n mod = models.Linear1D(a, b)\n\n with pytest.warns(AstropyUserWarning, match=r\"Model is linear in parameters\"):\n fmod = fitter(mod, x, y, weights=sigma**-1)\n\n assert_allclose(fmod.parameters, beta.ravel())\n assert_allclose(cov, fitter.fit_info[\"param_cov\"])",
"def init_kernel(cls, m):\n pass",
"def __init__(self, N0, N1):\n #self.w = np.zeros(N);\n self.p0 = N0/(N0+N1) \n self.p1 = N1/(N0+N1)\n self.mu0 = np.zeros(N0+N1)\n self.mu1 = np.zeros(N0+N1)\n self.covariance = 0",
"def white(input_dim,variance=1.):\r\n part = parts.white.White(input_dim,variance)\r\n return kern(input_dim, [part])",
"def test_calculate_variance_covariance_zero_division_shape(self):\n\n _var_covar = calculate_variance_covariance(22, 620.0, 0.4239, 0.0)\n self.assertAlmostEqual(_var_covar[0][0], 0.006105992)\n self.assertAlmostEqual(_var_covar[0][1], 0.03925982)\n self.assertAlmostEqual(_var_covar[1][0], 0.03925982)\n self.assertAlmostEqual(_var_covar[1][1], -0.7475704)",
"def testBiasInitializerIsZeroByDefault(self):\n\n conv1 = snt.Conv3DTranspose(\n output_channels=7,\n kernel_shape=3,\n stride=1)\n\n conv1(tf.placeholder(tf.float32, [7, 10, 10, 10, 5]))\n\n with self.test_session():\n tf.variables_initializer([conv1.w, conv1.b]).run()\n\n self.assertAllClose(\n conv1.b.eval(),\n np.zeros([7], dtype=np.float32))",
"def test_low_variance(self):\n # Cycle through various initializations\n initializations = ['random', 'pca']\n allowed = 1e-3\n\n for init in initializations:\n tsne = TSNE(initialization=init, perplexity=2)\n embedding = tsne.prepare_initial(self.x)\n np.testing.assert_array_less(np.var(embedding, axis=0), allowed,\n 'using the `%s` initialization' % init)",
"def normal_init(m, mean, std):\n if isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Conv2d):\n m.weight.data.normal_(mean, std)\n m.bias.data.zero_()",
"def abe(img,variance):\n nominator = img**2-3*variance\n nominator[nominator<0] = 0\n out = np.divide(nominator,img)\n out[img==0]=0\n return out",
"def test_fixedkernel(self):\r\n X = np.random.rand(30, 4)\r\n K = np.dot(X, X.T)\r\n kernel = GPy.kern.fixed(4, K)\r\n kern = GPy.kern.poly(5, degree=4)\r\n self.assertTrue(GPy.kern.kern_test(kern, verbose=verbose))",
"def testInitializers(self, use_bias):\n\n w_dw = random.random()\n w_pw = random.random()\n b = np.random.randn(6) # Kernel shape is 3, input channels are 2, 2*3 = 6.\n conv1 = snt.SeparableConv2D(\n output_channels=6,\n channel_multiplier=3,\n kernel_shape=3,\n use_bias=use_bias,\n initializers=create_separable_constant_initializers(\n w_dw, w_pw, b, use_bias))\n\n conv1(tf.placeholder(tf.float32, [1, 10, 10, 2]))\n\n with self.test_session():\n tf.variables_initializer(\n [conv1.w_dw, conv1.w_pw, conv1.b] if use_bias else\n [conv1.w_dw, conv1.w_pw]).run()\n\n self.assertAllClose(\n conv1.w_dw.eval(), np.full(\n [3, 3, 2, 3], w_dw, dtype=np.float32))\n self.assertAllClose(\n conv1.w_pw.eval(), np.full(\n [1, 1, 6, 6], w_pw, dtype=np.float32))\n\n if use_bias:\n self.assertAllClose(conv1.b.eval(), b)\n\n error_msg = \"Initializer for 'w_dw' is not a callable function\"\n with self.assertRaisesRegexp(TypeError, error_msg):\n snt.SeparableConv2D(\n output_channels=3,\n channel_multiplier=1,\n kernel_shape=3,\n stride=1,\n use_bias=use_bias,\n initializers={\"w_dw\": tf.ones([])})",
"def __init__(self, kernel_parameter: Union[int, float] = 1, nu=1.5):\n super().__init__(kernel_parameter)\n self.nu = nu",
"def test_calculate_variance_covariance_zero_division_scale(self):\n\n _var_covar = calculate_variance_covariance(22, 620.0, 0.0, 0.6142)\n self.assertAlmostEqual(_var_covar[0][0], -0.0005236216)\n self.assertAlmostEqual(_var_covar[0][1], 0.002995667)\n self.assertAlmostEqual(_var_covar[1][0], 0.002995667)\n self.assertAlmostEqual(_var_covar[1][1], 8.9787221E-06)",
"def initialize_(self, kernel, bias=None):\n dtype = self.body[0].weight.dtype\n device = self.body[0].weight.device\n kernel = torch.tensor(kernel, dtype=dtype, device=device, requires_grad=True)\n assert kernel.shape == self.body[0].weight.shape, 'Wrong kernel shape!'\n if bias is not None:\n bias = torch.tensor(bias, dtype=dtype, device=device, requires_grad=True)\n assert bias.shape == self.body[0].bias.shape, 'Wrong bias shape!'\n self.body[0].weight.data.copy_(kernel)\n self.body[0].bias.data.copy_(bias)",
"def test_cvae_init(self):\n torch.manual_seed(0)\n cvae = VAE(\n in_dim=20,\n nb_classes=2,\n latent_dim=16,\n p_dropout=0.1,\n hidden_dims=[64, 32])\n rng = np.random.default_rng(seed=0)\n x = torch.tensor(rng.random(size=(10, 20))).float()\n rng = np.random.default_rng(seed=0)\n y = rng.integers(low=0, high=2, size=10)\n y_onehot = one_hot(y, nb_classes=2)\n y_onehot = torch.tensor(y_onehot).float()\n [x_hat, _, _, _] = cvae(x, y_onehot, 0)\n x_hat_mean = torch.mean(x_hat).detach().numpy()\n self.assertAlmostEqual(x_hat_mean, -0.0245, 4)"
] | [
"0.64694107",
"0.64663404",
"0.63284683",
"0.6302467",
"0.60142285",
"0.59726274",
"0.5947721",
"0.589947",
"0.5893144",
"0.589087",
"0.5890487",
"0.581995",
"0.5760517",
"0.57496685",
"0.57280266",
"0.5705864",
"0.56650275",
"0.5636754",
"0.56325996",
"0.56217444",
"0.5600975",
"0.5580089",
"0.55646473",
"0.5543633",
"0.55286956",
"0.5527734",
"0.54987717",
"0.5497852",
"0.5476786",
"0.5471861"
] | 0.6615188 | 0 |
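The negatives above all exercise the same guard: a convolution module must reject a kernel_shape whose rank does not match the spatial rank (see the SeparableConv1D/2D, Conv3D and DepthwiseConv2D tests). A minimal, framework-free sketch of that check, assuming a hypothetical helper _fill_shape and reusing the error wording from the tests; this is not Sonnet's actual implementation:

    def _fill_shape(shape, n):
        # Accept a positive int (broadcast to rank n) or an iterable of n
        # positive ints; anything else raises, as in the tests above.
        if isinstance(shape, int):
            if shape > 0:
                return (shape,) * n
        else:
            try:
                shape = tuple(shape)
            except TypeError:
                pass
            else:
                if len(shape) == n and all(
                        isinstance(s, int) and s > 0 for s in shape):
                    return shape
        raise ValueError(
            "Invalid kernel shape: x is %r, must be either a positive integer "
            "or an iterable of positive integers of size %d" % (shape, n))

    assert _fill_shape(3, 2) == (3, 3)
    assert _fill_shape([3, 4], 2) == (3, 4)
    try:
        _fill_shape([3], 2)
    except ValueError as e:
        assert "Invalid kernel shape" in str(e)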
Test that the assertion fires for a negative delta time | def test_to_delta_time_positive_difference(with_tf_random_seed, np_time_points):
time_points = tf.constant(np_time_points, dtype=default_float())
with pytest.raises(InvalidArgumentError) as exp:
to_delta_time(time_points)
assert exp.value.message.find("Condition x >= y") >= 0 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_timeout_elapsed_no_exception(self):\n deadline = Deadline(-MS)\n timeout = deadline.timeout(raise_if_elapsed=False)\n self.assertGreater(timeout, -2 * MS)\n self.assertLess(timeout, -MS)",
"def test_negative_timedelta(self):\n @converters.wrap\n def inner_test(param: datetime.timedelta):\n \"\"\"This shouldn't be called, converting should fail.\"\"\"\n pass\n self.assert_raises_request_error(\n lambda: inner_test(param='-60'), 3117\n )",
"def test_validate_delta():\n with pytest.raises(ValueError):\n validate_delta(1.1)\n\n with pytest.raises(ValueError):\n validate_delta(-0.1)\n\n assert validate_delta(0.1) == 0.1",
"def test_timestamp_minus(self, mock):\n mock.configure_mock(**(self.config_payload(-1, -2)))\n self.assertRaises(\n AssertionError,\n lf.lambda_handler, event=self.lambdaevent, context=None)\n mock.client.return_value.update_thing_shadow.assert_not_called()",
"def _badness(self, time):\n return (time - self.expected_time)**2",
"def test_subtract_all_args_less_zero(self):\n try:\n self.assertEqual(subtract(-18, -5), -13)\n except Exception as error:\n print(error)",
"def test_negative_interval(self, Signal):\n blk = ElapsedTime()\n config = {\n 'enrich': {\n 'exclude_existing': True,\n },\n 'units': {\n 'days': '{{ $days }}',\n 'hours': '{{ $hours }}',\n 'minutes': '{{ $minutes }}',\n 'seconds': '{{ $seconds }}',\n },\n 'timestamp_a': self.timestamp_b,\n 'timestamp_b': self.timestamp_a,\n }\n self.configure_block(blk, config)\n\n # process a list of signals\n blk.start()\n blk.process_signals([\n # the default case\n Signal({\n 'days': False,\n 'hours': False,\n 'minutes': False,\n 'seconds': True,\n }),\n # all units\n Signal({\n 'days': True,\n 'hours': True,\n 'minutes': True,\n 'seconds': True,\n }),\n ])\n blk.stop()\n\n # check output\n self.assert_last_signal_list_notified([\n Signal({\n 'seconds': self.total_seconds * -1,\n }),\n Signal({\n 'days': -1,\n 'hours': -12,\n 'minutes': -42,\n 'seconds': -3.142,\n }),\n ])",
"def assert_timeout(self) -> None:",
"def inner_test(param: datetime.timedelta):\n self.assertEqual(param, datetime.timedelta(\n days=3, hours=2, minutes=5, seconds=43\n ))",
"def test_negation(self):\n\n a1 = tuples.Tuple([\"a\", \"b\", \"c\", \"d\"], 1, -2, 3, -4)\n\n a2 = -a1\n\n self.assertEqual(a2,\n tuples.Tuple([\"a\", \"b\", \"c\", \"d\"], -1, 2, -3, 4))",
"def test9(self):\n sig1 = np.array([0, 1, 0])\n sig2 = np.array([0, 0, 1, 0])\n d, p = EventSync.estimate_delay(sig1, sig2)\n self.assertTrue(d == -1)",
"def test_delta_minus(self):\n d = Delta(\"-50\")\n self.assertEqual(d.cmp(0, 50), False)\n self.assertEqual(d.cmp(51, 0), True)\n self.assertEqual(d.cmp(5, 10), False)\n d = Delta(\"-50=\")\n self.assertEqual(d.cmp(50, 0), True)\n d = Delta(\"-50%\")\n self.assertEqual(d.cmp(25, 10), True)\n self.assertEqual(d.cmp(10, 25), False)",
"def test_t(self):\n assert np.isclose(self.stepper.t, self.final_t)",
"def test_subtract_zero_arg(self):\n try:\n self.assertEqual(subtract(0, -6), 7)\n except Exception as error:\n print(f'Got error in {inspect.stack()[0][3]}, {error}')",
"def test_negative(self):\n self.assertFalse(validate_measure_input('-1', self.measures))",
"def test_subtract_all_args_greater_zero(self):\n try:\n self.assertEqual(subtract(30, 16), 15)\n except Exception as error:\n print(f'Got error in {inspect.stack()[0][3]}, {error}')",
"def test_bad_interval(self):\n # Intentionally set a small interval (3 min) to fail.\n interval = np.timedelta64(3, 'm')\n self.assertFalse(utils.check_timestamps(self.times, interval))",
"def test_mock_monotonic_clock__raises_on_negative_step() -> None:\n clock = MockMonotonicClock(0)\n with raises(ClockError):\n clock.step_size = -1",
"def test8(self):\n sig1 = np.array([1, 0, 0, 0])\n sig2 = np.array([0, 1, ])\n d, p = EventSync.estimate_delay(sig1, sig2)\n self.assertTrue(d == -1)",
"def test_duration_argument_is_negative(self):\n with self.assertRaises(ValueError) as cm:\n DurationMixin(duration=-10)\n\n self.assertEqual(\n cm.exception.message,\n 'DurationMixin.duration should be an non-negative float'\n )",
"def check_time():\n times = get_times()\n time_difference = abs((times['local'] - times['target']).total_seconds())\n return time_difference < post_time_tol_seconds",
"def assert_TPVE(self, *args, **kw):\n return self.assertRaises(TimeParserValueError, *args, **kw)",
"def inner_test(param: datetime.timedelta):\n pass",
"def test_total_time_no_end_time(time_record_factory):\n d = datetime.datetime(2018, 10, 1, 15, 26)\n t = time_record_factory(time_start=d, time_end=None)\n expected = datetime.timedelta(0)\n assert t.total_time == expected",
"def test4(self):\n sig1 = np.array([0, 1, 0])\n sig2 = np.array([0, 0, 1])\n d, p = EventSync.estimate_delay(sig1, sig2)\n self.assertTrue(d == -1)",
"def test_with_now_minus_2_days(self):\n self.assertEqual(ageid(self.now - timedelta(2)), 'age3')",
"def test_debt_target_expired(self):\n measurement = self.measurement(\n self.metric(accept_debt=True, debt_target=\"100\", issue_ids=[\"FOO-40\"]),\n count={\"debt_target\": \"100\"},\n issue_status=[{\"status_category\": \"done\", \"issue_id\": \"FOO-40\"}],\n )\n self.assertTrue(measurement.debt_target_expired())",
"def test_minus(self):\n print('test_minus');\n self.assertEqual(90, minus(100, 10))",
"def test_parse_time_with_invalid_interval(self):\n now = datetime(2015, 2, 1, 0, 0, 0)\n self.assert_TPVE(parse_time, \"-0\", now)\n self.assert_TPVE(parse_time, \"-12\", now)\n self.assert_TPVE(parse_time, \"-12fortnights\", now)\n self.assert_TPVE(parse_time, \"-20150101\", now)",
"def test_duration_attribute_is_negative(self):\n d = DurationMixin(duration=10)\n\n with self.assertRaises(ValueError) as cm:\n d.duration = -10\n\n self.assertEqual(\n cm.exception.message,\n 'DurationMixin.duration should be an non-negative float'\n )"
] | [
"0.7007831",
"0.6637345",
"0.6576894",
"0.6474647",
"0.64391077",
"0.6374031",
"0.6313927",
"0.6297262",
"0.62733597",
"0.6259941",
"0.62490773",
"0.62064755",
"0.62022203",
"0.6191",
"0.6171475",
"0.61501443",
"0.610897",
"0.6103212",
"0.6097491",
"0.609512",
"0.6081113",
"0.6078611",
"0.6055014",
"0.60354775",
"0.6035291",
"0.60194445",
"0.6000014",
"0.5996328",
"0.59930754",
"0.59916365"
] | 0.6970346 | 1 |
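For context, a dependency-light rendition of the positive record above: the same "assertion fires for a negative time delta" contract, with NumPy standing in for TensorFlow and ValueError for InvalidArgumentError (both substitutions are assumptions, as is the to_delta_time body):

    import numpy as np
    import pytest

    def to_delta_time(time_points):
        # Differences between consecutive time points; require a sorted
        # input, mirroring TF's "Condition x >= y" assertion.
        dt = np.diff(np.asarray(time_points, dtype=float))
        if np.any(dt < 0):
            raise ValueError("Condition x >= y did not hold")
        return dt

    def test_negative_delta_raises():
        with pytest.raises(ValueError) as exp:
            to_delta_time([0.0, 2.0, 1.0])
        assert "Condition x >= y" in str(exp.value)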
Compute the derivative of the logpdf with respect to the parameters. | def log_pdf_derivative(x):
return gs.autodiff.jacobian(log_pdf_at_x(x))(base_point) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def log_pdf_derivative_squared(x):\n dlog = log_pdf_derivative(x)\n return gs.einsum(\"...i, ...j -> ...ij\", dlog, dlog)",
"def logpdf(self, x):\n if self.transform is not None:\n x = self.transform(x) \n return (-self.alpha-1)*np.log(x) - (self.beta/float(x))",
"def log_deriv(error):\n return logistic(error) * (1 - logistic(error))",
"def grad_log(self, X):\n # \"\"\"\n # Evaluate the gradients (with respect to the input) of the log density at\n # each of the n points in X. This is the score function.\n\n # X: n x d numpy array.\n XB = np.dot(X, self.B)\n Y = 0.5*XB + self.c\n E2y = np.exp(2*Y)\n # n x dh\n Phi = old_div((E2y-1.0),(E2y+1))\n # n x dx\n T = np.dot(Phi, 0.5*self.B.T)\n S = self.b - X + T\n return S",
"def _evaluate_point_logpdf(args):\n x, data, cho_factor = args\n\n # Use Cholesky decomposition to avoid direct inversion of covariance matrix\n diff = data - x\n tdiff = la.cho_solve(cho_factor, diff.T, check_finite=False).T\n diff *= tdiff\n\n # Work in the log to avoid large numbers\n return logsumexp(-np.sum(diff, axis=1)/2.0)",
"def logpdf(x, a, b):\n with mp.extradps(5):\n a, b = _validate_a_b(a, b)\n x = mp.mpf(x)\n if x < 0 or x > 1:\n return -mp.inf\n return (_fun.xlogy(a - 1, x) + _fun.xlog1py(b - 1, -x)\n - _fun.logbeta(a, b))",
"def logPdf(self,x):\n logPdf = np.log(self.pdf(x))\n return logPdf",
"def logPdf(self,x):\n logPdf = np.log(self.pdf(x))\n return logPdf",
"def logpdf(self, X) -> np.ndarray:\n return self.dist.logpdf(self.inv_trans(X))",
"def logit_deriv(y):\n# if y.any() < 0.0 or y.any() > 1.0:\n# raise Exception\n\n return y*(1-y)",
"def logp_grad(self, xs, ys, fs, **kwargs):",
"def invwish_logpdf(X, S, df):\n d = X.shape[0]\n if df < d:\n raise ValueError('df must be greater than or equal to the number of '\n ' dimensions of S')\n if d != X.shape[1]:\n raise ValueError('X must be square.')\n if S.shape[0] != d or S.shape[1] != d:\n raise ValueError('S must be the same shape as X.')\n\n _, logdet_S = slogdet(S)\n _, logdet_X = slogdet(X)\n\n logpdf = (df/2)*logdet_S - ((df*d/2)*log(2) + multigammaln(df/2, d))\n logpdf += (-(d+df+1)/2)*logdet_X - (1/2)*trace(solve(X.T, S.T))\n\n return logpdf",
"def conditional_logpdf(self, x1, x2 = None):\n f_x2 = self.unconditional_pdf_x2(x2)\n return self.joint_logpdf(x1, x2) - np.log(f_x2)",
"def logpdf(x, p, temperature):\n assert x.shape == p.shape\n tol = 1e-7\n p = np.clip(p, tol, 1 - tol)\n x = np.clip(x, tol, 1 - tol)\n logit_p = logit(p)\n first_term = np.log(temperature) + logit_p - (1 + temperature) * np.log(x) - (1 + temperature) * np.log(1 - x)\n second_term = 2 * np.log((np.exp(logit_p) * (x ** (- temperature))) + (1 - x) ** (- temperature))\n return first_term - second_term",
"def logpdf(self, X) -> np.ndarray:\n raise NotImplementedError",
"def test_log():\n x, y = fwd.Variable(), fwd.Variable()\n f = fwd.log(fwd.sin(x)+y**2)\n dfdx = lambda x, y: np.cos(x) / (np.sin(x)+y**2)\n dfdy = lambda x, y: 2*y / (np.sin(x)+y**2)\n d2fdxdy = lambda x, y: -2*y*np.cos(x) / (np.sin(x)+y**2)**2\n assert equals(f.evaluation_at({x: 1.5, y:2.5}), np.log(np.sin(1.5)+2.5**2))\n assert equals(f.derivative_at(x, {x: 1.5, y:2.5}), dfdx(1.5, 2.5))\n assert equals(f.derivative_at(y, {x: 1.5, y:2.5}), dfdy(1.5, 2.5))\n assert equals(f.derivative_at((x, y), {x: 1.5, y:2.5}), d2fdxdy(1.5, 2.5))\n with pytest.raises(NotImplementedError):\n f.derivative_at(x, {x:1.0, y: 2.0}, order=3)",
"def dlogpdf_dlink(self, link_f, y, extra_data=None):\r\n assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape\r\n grad = self.beta*np.log(self.beta*y) - special.psi(self.beta*link_f)*self.beta\r\n #old\r\n #return -self.gp_link.dtransf_df(gp)*self.beta*np.log(obs) + special.psi(self.gp_link.transf(gp)*self.beta) * self.gp_link.dtransf_df(gp)*self.beta\r\n return grad",
"def dvdlogdp(self):\n return self.dndlogdp.mul(self.v_multiplier)",
"def log_den(self, X):\n raise NotImplementedError()",
"def _derivative_(self, x, diff_param=None):\n return 2*exp(-x**2)/sqrt(pi)",
"def _log_prior_gradients(self):\n x = self._get_params()\n ret = np.zeros(x.size)\n [np.put(ret,i,p.lnpdf_grad(xx)) for i,(p,xx) in enumerate(zip(self.priors,x)) if not p is None]\n return ret",
"def der_log(self, xr, xc=None, out=None):\n if xc is None:\n return self._pder_log(xr, out)\n else:\n return self._pder_log(_np.hstack((xr, xc)), out)",
"def log_pdf(X, parameters):\n check_data_type_column_data(X)\n check_model_params_dict(parameters)\n\n sigma = (1.0/parameters['rho'])**.5\n\n return norm.logpdf(X,parameters['mu'],sigma)",
"def _log_prior_gradients(self):\n if self.priors.size == 0:\n return 0.\n x = self.param_array\n ret = np.zeros(x.size)\n #compute derivate of prior density\n [np.put(ret, ind, p.lnpdf_grad(x[ind])) for p, ind in self.priors.items()]\n #add in jacobian derivatives if transformed\n priored_indexes = np.hstack([i for p, i in self.priors.items()])\n for c,j in self.constraints.items():\n if not isinstance(c, Transformation):continue\n for jj in j:\n if jj in priored_indexes:\n ret[jj] += c.log_jacobian_grad(x[jj])\n return ret",
"def log_pdf(self, x):\n if x < 0:\n raise Exception(\"input value x can't be a negative value!\")\n\n if self.is_fit:\n if x >= 0:\n return -np.log(np.math.factorial(x)) + \\\n x * np.log(self.alpha) - self.alpha\n else:\n return 0\n else:\n raise Exception(\"Distribution doesn't have all parameters set!\")",
"def dlogpdf_dlink(self, link_f, y, Y_metadata=None):\n assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape\n c = np.zeros_like(y)\n if Y_metadata is not None and 'censored' in Y_metadata.keys():\n c = Y_metadata['censored']\n\n val = np.log(y) - link_f\n val_scaled = val/np.sqrt(self.variance)\n val_scaled2 = val/self.variance\n uncensored = (1-c)*(val_scaled2)\n a = (1- stats.norm.cdf(val_scaled))\n # llg(z) = 1. / (1 - norm_cdf(r / sqrt(s2))). * (1 / sqrt(2 * pi * s2). * exp(-1 / (2. * s2). * r. ^ 2));\n censored = c*( 1./a) * (np.exp(-1.* val**2 /(2*self.variance)) / np.sqrt(2*np.pi*self.variance))\n # censored = c * (1. / (1 - stats.norm.cdf(val_scaled))) * (stats.norm.pdf(val_scaled))\n gradient = uncensored + censored\n return gradient",
"def grad_reglog(w, X, y, **kwargs):\n p = np.exp(-y * (np.dot(X, w)))\n P = p / (1. + p)\n return -1 * np.dot(X.T, P * y) / X.shape[0]",
"def log_likelihood_gradients_(self, y, f):\n log_lik = self.evaluate_log_likelihood(y, f)\n f = np.squeeze(f)\n J = jacrev(self.evaluate_log_likelihood, argnums=1)\n H = jacrev(J, argnums=1)\n return log_lik, J(y, f), H(y, f)",
"def dlogpdf_df(self, f, y, extra_data=None):\r\n link_f = self.gp_link.transf(f)\r\n dlogpdf_dlink = self.dlogpdf_dlink(link_f, y, extra_data=extra_data)\r\n dlink_df = self.gp_link.dtransf_df(f)\r\n return chain_1(dlogpdf_dlink, dlink_df)",
"def _log_likelihood_gradients(self):\r\n return np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK))))"
] | [
"0.77660567",
"0.70519257",
"0.6876588",
"0.6850686",
"0.67922556",
"0.677512",
"0.67627126",
"0.67627126",
"0.6756322",
"0.67217326",
"0.67073756",
"0.65952337",
"0.6584878",
"0.6568949",
"0.65437335",
"0.6536892",
"0.65261656",
"0.65239733",
"0.651334",
"0.6512677",
"0.6508049",
"0.64886004",
"0.6481579",
"0.6441033",
"0.6440329",
"0.6438905",
"0.64342606",
"0.6430511",
"0.6428168",
"0.640558"
] | 0.82203406 | 0 |
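The positive document above differentiates a log-pdf at a base point with gs.autodiff.jacobian. A self-contained sketch of the same idea, using a central finite difference in place of geomstats' autodiff; the Gaussian example, step size and tolerance are assumptions:

    import numpy as np

    def gaussian_log_pdf(x, params):
        mu, sigma = params
        return (-0.5 * np.log(2 * np.pi * sigma ** 2)
                - (x - mu) ** 2 / (2 * sigma ** 2))

    def log_pdf_derivative(x, params, eps=1e-6):
        # Central finite difference standing in for gs.autodiff.jacobian.
        params = np.asarray(params, dtype=float)
        grad = np.zeros_like(params)
        for i in range(params.size):
            step = np.zeros_like(params)
            step[i] = eps
            grad[i] = (gaussian_log_pdf(x, params + step)
                       - gaussian_log_pdf(x, params - step)) / (2 * eps)
        return grad

    # Closed-form check: d(log pdf)/d(mu) = (x - mu) / sigma**2.
    assert np.isclose(log_pdf_derivative(1.3, [0.0, 2.0])[0], 1.3 / 4.0, atol=1e-5)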
r"""Compute the derivative of the innerproduct matrix. Compute the derivative of the innerproduct matrix of the Fisher information metric at the tangent space at base point. | def inner_product_derivative_matrix(self, base_point):
def pdf(x):
"""Compute pdf at a fixed point on the support.
Parameters
----------
x : float, shape (,)
Point on the support of the distribution
"""
return lambda point: self.information_manifold.point_to_pdf(point)(x)
def _function_to_integrate(x):
pdf_x = pdf(x)
pdf_x_at_base_point = pdf_x(base_point)
pdf_x_derivative = gs.autodiff.jacobian(pdf_x)
pdf_x_derivative_at_base_point = pdf_x_derivative(base_point)
return (
1
/ (pdf_x_at_base_point**2)
* (
2
* pdf_x_at_base_point
* gs.einsum(
"...ij, ...k -> ...ijk",
gs.autodiff.jacobian(pdf_x_derivative)(base_point),
pdf_x_derivative_at_base_point,
)
+ gs.einsum(
"...i, ...j, ...k -> ...ijk",
pdf_x_derivative_at_base_point,
pdf_x_derivative_at_base_point,
pdf_x_derivative_at_base_point,
)
)
)
return quad_vec(_function_to_integrate, *self.support)[0] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def integrability_tensor_derivative(\n self,\n horizontal_vec_x,\n horizontal_vec_y,\n nabla_x_y,\n tangent_vec_e,\n nabla_x_e,\n base_point,\n ):\n raise NotImplementedError",
"def derivatives(x_p, y_p):\r\n # set up the matrix equation\r\n n = x_p.shape[0]\r\n M = np.zeros( [n,n] )\r\n d = np.zeros( [n,1] )\r\n \r\n # fill in the constants where they can be\r\n for i in np.arange(1,n-1 ): # for all but the first and last row\r\n M[i,i-1 ] = ( x_p[i] - x_p[i-1] ) / 6.\r\n M[i,i] = ( x_p[i+1] - x_p[i-1] ) / 3.\r\n M[i,i+1] = ( x_p[i+1] - x_p[i] ) /6.\r\n d[i,0 ] = ( y_p[i+1] - y_p[i] ) / ( x_p[i+1] - x_p[i] ) - ( y_p[i] - y_p[i-1] ) / ( x_p[i] - x_p[i-1] )\r\n \r\n M[0,0],M[-1,-1] = 1.,1. # compactly sets the BCs\r\n \r\n LU = lu.LU_decomp(M) # solves the matrix equations\r\n return lu.FB_sub(LU.Low, LU.Upp, d) # find and return 2nd derivatives\r",
"def gather_derivatives(self):\n self.xdot[0,0:self.n] = self.mdot[0:self.n] \n self.xdot[1,0:self.n] = self.rdot[0:self.n,0]\n self.xdot[2,0:self.n] = self.rdot[0:self.n,1]\n self.xdot[3,0:self.n] = self.rdot[0:self.n,2]\n self.xdot[4,0:self.n] = self.vdot[0:self.n,0]\n self.xdot[5,0:self.n] = self.vdot[0:self.n,1]\n self.xdot[6,0:self.n] = self.vdot[0:self.n,2]\n return self.xdot",
"def dE_mdn(self, x, y, t, w1 = None, w2 = None):\n if w2 == None:\n w2 = self.w2\n M = int(self.M)\n # avoid underrun\n \n alpha, sigma, mu = self.getMixtureParams(y.T)\n #import pdb; pdb.set_trace()\n \n #T = t.T[None, None, :] # note: np.tile is slower than this notation\n T = t.T[None, :]\n \n phi = self._phi(T, mu, sigma)\n aphi = alpha*phi\n pi = aphi / np.sum(aphi, 0)\n \n # derivatives of E with respect to the output variables (s. Bishop 1995, chp. 6.4)\n dE_dy_alpha = alpha - pi\n dE_dy_sigma = - 0.5 * pi * ((np.sum((T-mu)**2 , 1) / sigma) - self.c)\n dE_dy_mu = pi[:,np.newaxis,:] * (mu - T) / sigma[:,np.newaxis,:]\n\n dk = np.zeros([self.ny, x.shape[0]])\n dk[0:M,:] = dE_dy_alpha\n dk[M:2*M,:] = dE_dy_sigma\n \n dk[2*M:] = np.reshape(dE_dy_mu, [M*self.c, x.shape[0]])\n \n # back-propagate the dks\n #t0=datetime.now()\n dEnw1, dEnw2 = self.backward(x, dk, None, w2)\n #print 'eval of dE_mdn:' + str((datetime.now()-t0))\n #dj = (1 - self.z[1:]**2) * np.dot(w2[:,1:].T, dk)\n # evaluate derivatives with respect to the weights\n #dEnw1 = (dj[:,:,np.newaxis]*x[np.newaxis,:,:]).transpose(1,0,2)\n #dEnw2 = (dk[:,:,np.newaxis]*self.z.T[np.newaxis,:,:]).transpose(1,0,2)\n return dEnw1, dEnw2",
"def gather_derivatives(self):\n self.xdot[0,0:self.n] = self.mdot[0:self.n] \n self.xdot[1,0:self.n] = self.rdot[0:self.n,0]\n self.xdot[2,0:self.n] = self.rdot[0:self.n,1]\n self.xdot[3,0:self.n] = self.rdot[0:self.n,2]\n self.xdot[4,0:self.n] = self.vdot[0:self.n,0]\n self.xdot[5,0:self.n] = self.vdot[0:self.n,1]\n self.xdot[6,0:self.n] = self.vdot[0:self.n,2]\n self.xdot[7,0:self.n] = self.rhodot[0:self.n] \n self.xdot[8,0:self.n] = 0\n self.xdot[9,0:self.n] = 0\n self.xdot[10,0:self.n] = self.udot[0:self.n]\n return self.xdot",
"def _derX(self, x, y):\n m = len(x)\n temp = np.zeros((m, self.funcCount))\n for j in range(self.funcCount):\n temp[:, j] = self.functions[j](x, y)\n i = self.argcompare(temp, axis=1)\n dfdx = np.zeros_like(x)\n for j in range(self.funcCount):\n c = i == j\n dfdx[c] = self.functions[j].derivativeX(x[c], y[c])\n return dfdx",
"def _2ndderiv_xyz(self,x,y,z,i,j):\n return -np.pi*self._rhoc_M*self.a**3*self._b*self._c *\\\n _2ndDerivInt(x,y,z,self._a2,self._b2*self._a2,self._c2*self._a2,self.n,i,j)",
"def __call__ ( self , func , x , h , der = False ) :\n\n ## calculate differences \n imax = self.__order + 2 if der else self.__order + 1\n i = 0\n while i < imax : \n j = i + 1\n self.__df[i] = func ( x + j * h ) - func ( x - j * h )\n i += 1\n \n ## 1) calculate 1st derivative \n result = dot_fma ( self.__order + 1 , self.__df , self.__d1 ) / ( self.__sf1 * h ) \n if not der : return result \n \n ## 2) calculate Nth derivative \n dd = dot_fma ( self.__order + 2 , self.__df , self.__d2 ) / ( self.__sf2 * h**(self.__order*2+3) ) \n \n return result, dd",
"def _derY(self, x, y):\n m = len(x)\n temp = np.zeros((m, self.funcCount))\n for j in range(self.funcCount):\n temp[:, j] = self.functions[j](x, y)\n i = self.argcompare(temp, axis=1)\n y = temp[np.arange(m), i]\n dfdy = np.zeros_like(x)\n for j in range(self.funcCount):\n c = i == j\n dfdy[c] = self.functions[j].derivativeY(x[c], y[c])\n return dfdy",
"def det(self):\n if self.x == 0 or self.y == 0:\n return None\n elif self.x == 1 or self.y == 1:\n return self.retrieve(0,0)\n else:\n out = 0.0\n for x in xrange(0, self.x):\n out += self.retrieve(0,x)*self.C(0,x)\n return out",
"def determinant(self):\n if not self.is_square():\n raise(ValueError, \"Cannot calculate determinant of non-square matrix.\")\n if self.h > 2:\n raise(NotImplementedError, \"Calculating determinant not implemented for matrices largerer than 2x2.\")\n\n # TODO - your code here\n if self.h == 1:\n return self.g[0][0] # a 1x1 matrix\n else:\n return ((self.g[0][0] * self.g[1][1]) - (self.g[0][1] * self.g[1][0])) # a 2x2 matrix\n # TODO - your code here",
"def _2ndderiv_xyz(self, x, y, z, i, j):\n return (\n 4.0\n * numpy.pi\n * self._b\n * self._c\n * _2ndDerivInt(\n x,\n y,\n z,\n lambda m: self._mdens(m),\n lambda m: self._mdens_deriv(m),\n self._b2,\n self._c2,\n i,\n j,\n glx=self._glx,\n glw=self._glw,\n )\n )",
"def derivative_matrix(g):\n\n def _(g):\n B = g.B[0].grad\n N = g.N[0]\n P = g.dec.P(1)\n H = np.vstack(P(B(i)) for i in range(N)).T\n return H\n\n return _(g), _(g.dual)",
"def determinant(self):\n if not self.is_square():\n raise(ValueError, \"Cannot calculate determinant of non-square matrix.\")\n if self.h > 2:\n raise(NotImplementedError, \"Calculating determinant not implemented for matrices largerer than 2x2.\")\n \n # TODO - your code here\n if self.h == 1:\n return self.g[0][0];\n else:\n return self.g[0][0]*self.g[1][1]-self.g[0][1]*self.g[1][0];",
"def det2(m):\n\t(a,b), (c,d) = m\n\treturn a*d - b*c",
"def compute_derivs_matrices(vecs, adv_vecs, dt):\n return (adv_vecs - vecs)/(1.*dt)",
"def _evalAndDer(self, x):\n m = len(x)\n fx = np.zeros((m, self.funcCount))\n for j in range(self.funcCount):\n fx[:, j] = self.functions[j](x)\n i = self.argcompare(fx, axis=1)\n y = fx[np.arange(m), i]\n dydx = np.zeros_like(y)\n for j in range(self.funcCount):\n c = i == j\n dydx[c] = self.functions[j].derivative(x[c])\n return y, dydx",
"def _evalAndDer(self, x):\n m = len(x)\n fx = np.zeros((m, self.funcCount))\n for j in range(self.funcCount):\n fx[:, j] = self.functions[j](x)\n i = self.argcompare(fx, axis=1)\n y = fx[np.arange(m), i]\n dydx = np.zeros_like(y)\n for j in range(self.funcCount):\n c = i == j\n dydx[c] = self.functions[j].derivative(x[c])\n return y, dydx",
"def derivative(self, theta):\n diag_gamma = np.dot(theta.T, self.X.T)\n logistic_term = self.logistic_fn(diag_gamma)\n diag_gamma = logistic_term * (1.0 - logistic_term)\n gamma = np.diag(diag_gamma)\n\n # v computation\n diags_v = 1.0 - 2*self.logistic_fn(np.dot(theta.T, self.X.T))\n diags_v = diags_v.reshape((-1, 1))\n diags_v = diags_v*self.X\n assert diags_v.shape == self.X.shape #N*d shape\n\n XtGamma = np.dot(self.X.T, gamma) # d*N shape\n\n # TODO: Verifier car pas sur de mon coup ... et surtout plus long...\n # id = np.eye(self.n_examples).reshape((self.n_examples, self.n_examples, 1))\n # diags_v = diags_v.reshape((self.n_examples, 1, self.dim))\n # v = id*diags_v # n*n*d tensor\n # left = np.tensordot(XtGamma, v, axes=(1, 0)) # shape d*N*d\n # assert left.shape == (self.dim, self.n_examples, self.dim)\n # dg = np.tensordot(left, self.X, axes=(1, 0))\n # dg = np.swapaxes(dg, axis1=-2, axis2=-1)\n\n dg = np.zeros((self.dim, self.dim, self.dim))\n for idx, v_i_diag in enumerate(diags_v.T):\n v_i = np.diag(v_i_diag)\n dg_di = np.dot(np.dot(XtGamma, v_i), self.X)\n dg[:, :, idx] = dg_di\n return dg",
"def derv(self, t, y):\n x = y[0];\n xc = y[1];\n n = y[2];\n\n Bhat = self.G * (1.0 - n) * self.alpha0(t) * (1 - 0.4 * x) * (1 - 0.4 * xc);\n\n dydt = np.zeros(3)\n\n dydt[0] = sp.pi / 12.0 * (xc + Bhat);\n dydt[1] = sp.pi / 12.0 * (self.mu * (xc - 4.0 / 3.0 * pow(xc, 3.0)) - x * (\n pow(24.0 / (0.99669 * self.taux), 2.0) + self.kparam * Bhat));\n dydt[2] = 60.0 * (self.alpha0(t) * (1.0 - n) - self.delta * n);\n\n return (dydt)",
"def calc_derivative(self, array_in, direction1, direction2 = False):\r\n A = array_in.copy()\r\n if direction1 != direction2:\r\n #Remove Nyquist frequency for even sample size and odd order of differentiation\r\n if direction1 == 'x' or direction2 == 'x':\r\n A[0,:] = 0.0\r\n if direction1 == 'y' or direction2 == 'y':\r\n A[:,0] = 0.0\r\n\r\n # Note that 'x' corresponds to the x1 direction, and 'y' to the\r\n # x2 direction\r\n # Perform first derivative in desired direction\r\n if direction1 == 'x':\r\n out = self.deriv_mat_x1*A\r\n elif direction1 == 'y':\r\n out = self.deriv_mat_x2*A\r\n\r\n # Perform second derivative in desired direction\r\n if direction2 == 'x':\r\n out = self.deriv_mat_x1*out\r\n elif direction2 == 'y':\r\n out = self.deriv_mat_x2*out\r\n\r\n return out",
"def determinant(self):\n if self.n_rows != self.n_cols:\n raise Exception('Matrix is not square')\n if self.n_rows == 2:\n return (self.data[0][0] * self.data[1][1]) - (self.data[1][0] * self.data[0][1])\n else:\n echelon, ops = reduce_to_echelon(self.data.copy(), True)\n swaps = sum([1 if row[0] == 'swap' else 0 for row in ops])\n return math.prod([echelon[i][i] for i in range(len(echelon))]) * (-1) ** swaps",
"def _dy(self, T):\n return self._h(np.diff(T)) * self._a / self._m / self._c * np.diff(T) * np.array([1, -1])",
"def diffuse_2d(t,y,D,shape):\n m,n = shape\n Fliq0 = np.reshape(np.ascontiguousarray(y),(m,n))\n dy = np.zeros((m,n))\n\n # Calculate derivatives in the interior\n dy[1:-1, 1:-1] = (\n D * (Fliq0[:-2, 1:-1] - 2 * Fliq0[1:-1, 1:-1] + Fliq0[2:, 1:-1])\n + D * (Fliq0[1:-1, :-2] - 2 * Fliq0[1:-1, 1:-1] + Fliq0[1:-1, 2:])\n )\n # Handle periodic boundary conditions\n #Edges\n dy[0, 1:-1] = (\n D * (Fliq0[-1, 1:-1] - 2 * Fliq0[0, 1:-1] + Fliq0[1, 1:-1])\n + D * (Fliq0[0, :-2] - 2 * Fliq0[0, 1:-1] + Fliq0[0, 2:])\n )\n dy[-1, 1:-1] = (\n D * (Fliq0[-2, 1:-1] - 2 * Fliq0[-1, 1:-1] + Fliq0[0, 1:-1])\n + D * (Fliq0[-1, :-2] - 2 * Fliq0[-1, 1:-1] + Fliq0[-1, 2:])\n )\n dy[1:-1, 0] = (\n D * (Fliq0[:-2, 0] - 2 * Fliq0[1:-1, 0] + Fliq0[2:, 0])\n + D * (Fliq0[1:-1, -1] - 2 * Fliq0[1:-1, 0] + Fliq0[1:-1, 1])\n )\n dy[1:-1, -1] = (\n D * (Fliq0[:-2, -1] - 2 * Fliq0[1:-1, -1] + Fliq0[2:, -1])\n + D * (Fliq0[1:-1, -2] - 2 * Fliq0[1:-1, -1] + Fliq0[1:-1, 0])\n )\n #Corners\n dy[0, 0] = (\n D * (Fliq0[-1, 0] - 2 * Fliq0[0, 0] + Fliq0[1, 0])\n + D * (Fliq0[0, -1] - 2 * Fliq0[0, 0] + Fliq0[0, 1])\n )\n dy[-1, 0] = (\n D * (Fliq0[-2, 0] - 2 * Fliq0[-1, 0] + Fliq0[0, 0])\n + D * (Fliq0[-1, -1] - 2 * Fliq0[-1, 0] + Fliq0[-1, 1])\n )\n dy[0, -1] = (\n D * (Fliq0[-1, -1] - 2 * Fliq0[0, -1] + Fliq0[1, -1])\n + D * (Fliq0[0, -2] - 2 * Fliq0[0, -1] + Fliq0[0, 0])\n )\n dy[-1, -1] = (\n D * (Fliq0[-2, -1] - 2 * Fliq0[-1, -1] + Fliq0[0, -1])\n + D * (Fliq0[-1, -2] - 2 * Fliq0[-1, -1] + Fliq0[-1, 0])\n )\n\n return dy.flatten()",
"def det(self):\n\n if self.rows != self.columns:\n raise ValueError(\"Matrix must be square\")\n\n if self.rows == 1:\n return self.row(1)[0]\n\n if self.rows == 2:\n return self.entry(1,1) * self.entry(2,2) - self.entry(1,2) * self.entry(2,1)\n\n det = 0\n row_to_expand = 1\n\n for i in range(1, self.columns + 1):\n det += self.entry(row_to_expand, i) * self._cofactor(row_to_expand, i)\n\n return det",
"def determinant(self):\n d1 = self._row_1[0] * (self._row_2[1] * self._row_3[2] - self._row_2[2] * self._row_3[1])\n d2 = self._row_1[1] * (self._row_2[0] * self._row_3[2] - self._row_2[2] * self._row_3[0])\n d3 = self._row_1[2] * (self._row_2[0] * self._row_3[1] - self._row_2[1] * self._row_3[0])\n return d1 - d2 + d3",
"def _derY(self, x, y, z):\n m = len(x)\n temp = np.zeros((m, self.funcCount))\n for j in range(self.funcCount):\n temp[:, j] = self.functions[j](x, y, z)\n i = self.argcompare(temp, axis=1)\n y = temp[np.arange(m), i]\n dfdy = np.zeros_like(x)\n for j in range(self.funcCount):\n c = i == j\n dfdy[c] = self.functions[j].derivativeY(x[c], y[c], z[c])\n return dfdy",
"def two_body_problem_derivatives(t, y):\n # E = compute_energy_of_twobodysystem(y)\n # positions\n ra = np.array([y[0:3]])\n rb = np.array([y[3:6]])\n # separation\n r_ab = rb - ra\n deltar = np.linalg.norm(r_ab)\n # accelerations\n rdotdota = Mb * r_ab / (deltar ** 3)\n rdotdotb = - Ma * r_ab / (deltar ** 3)\n # rewrite in column form\n ret = np.concatenate((np.array(y[6:]),rdotdota,rdotdotb), axis=None)\n return ret",
"def det(v_i, v_j):\n return (v_i[0] * v_j[1]) - (v_j[0] * v_i[1])",
"def det(v_i, v_j):\n return (v_i[0] * v_j[1]) - (v_j[0] * v_i[1])"
] | [
"0.65636075",
"0.6383123",
"0.6320185",
"0.6274132",
"0.62254035",
"0.6178101",
"0.6175668",
"0.6127627",
"0.61223304",
"0.61122054",
"0.61108375",
"0.60562176",
"0.60325277",
"0.6020855",
"0.6019261",
"0.6014277",
"0.60129786",
"0.60129786",
"0.5989726",
"0.59852016",
"0.5982169",
"0.59668463",
"0.5935284",
"0.59349674",
"0.5933779",
"0.5933281",
"0.5933101",
"0.5911815",
"0.5894086",
"0.5894086"
] | 0.6529994 | 1 |
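The record above assembles the derivative of the Fisher inner-product matrix by integrating pdf derivatives over the support with quad_vec. A one-parameter analogue that can be checked against a closed form; the choice of the exponential family, the finite-difference step and the tolerances are assumptions:

    import numpy as np
    from scipy.integrate import quad

    def fisher_info(lam):
        # I(lam) = E[(d/dlam log p(x; lam))**2] for the exponential density
        # lam * exp(-lam * x); the score is 1/lam - x.
        score_sq = lambda x: (1.0 / lam - x) ** 2 * lam * np.exp(-lam * x)
        return quad(score_sq, 0, np.inf)[0]

    def fisher_info_derivative(lam, eps=1e-4):
        # Central difference over the integral, echoing the quad_vec pattern.
        return (fisher_info(lam + eps) - fisher_info(lam - eps)) / (2 * eps)

    # Closed forms: I(lam) = 1/lam**2 and I'(lam) = -2/lam**3.
    assert np.isclose(fisher_info(2.0), 0.25, atol=1e-6)
    assert np.isclose(fisher_info_derivative(2.0), -0.25, atol=1e-3)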
Compute the cost function given a set of features / values, and the values for our thetas. | def compute_cost(features, values, theta):
# your code here
error = (values - features.dot(theta))
cost = error.dot(error)
return cost | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compute_cost(features, values, theta):\r\n m = len(values)\r\n sum_of_square_errors = numpy.square(numpy.dot(features, theta) - values).sum()\r\n cost = sum_of_square_errors / (2*m)\r\n\r\n return cost",
"def compute_cost(features, values, theta):\n m = len(values)\n sum_of_square_errors = np.square(np.dot(features, theta) - values).sum()\n cost = sum_of_square_errors / (2 * m)\n\n return cost",
"def compute_cost(features, values, theta):\n\n npoints = len(values)\n sum_of_square_errors = np.square(np.dot(features, theta) - values).sum()\n cost = sum_of_square_errors / (2*npoints)\n\n return cost",
"def compute(self, F, variables):\n s_0 = self.initial_state_generator(self.num_samples)\n a_0 = self.policy(s_0)\n a_t_plus_1 = a_0\n x_t = F.expand_dims(F.concat(s_0, a_0, dim=1), axis=1)\n cost = 0\n for t in range(self.n_time_steps):\n variables[self.model.X] = x_t\n res = self.model.Y.factor.predict(F, variables, targets=[self.model.Y], num_samples=self.num_samples)[0]\n s_t_plus_1 = res[0]\n\n cost = cost + self.cost_function(s_t_plus_1, a_t_plus_1)\n\n a_t_plus_1 = mx.nd.expand_dims(self.policy(s_t_plus_1), axis=2)\n x_t = mx.nd.concat(s_t_plus_1, a_t_plus_1, dim=2)\n total_cost = F.sum(cost)\n return total_cost, total_cost",
"def _objective_function(self, thetas, X, Y):\n \n # Convert thetas vector to form total_cost can understand\n thetas = self.reshape_thetas(thetas, 'list')\n self.thetas = thetas\n \n # Get cost function value\n fval = self.total_cost(X, Y, thetas)\n \n # Get derivatives using back propagation\n Deltas = self.get_gradients(X, Y)\n dfval = self.reshape_thetas(Deltas, 'vector')\n \n return fval, dfval",
"def cofiCostFunc(self,params, *args):\n\t\tY, R, num_users, num_products, num_features,l = args[0], args[1],args[2], args[3],args[4],args[5]\n\n\t\taux = params.reshape((num_products + num_users, num_features))\n\n\t\tX = aux[0:num_products , :]\n\n\t\tTheta = aux[num_products:, :] \n\n\t\ttest = np.dot(X,Theta.transpose())\n\t\ttest = test - Y\n\t\ttest = np.multiply(test , R)\n\t\ttest = np.power(test,2)\n\t\ttest = test.sum()\n\t\ttest = 0.5 * test\n\n\t\tJ = 0;\n\t\tregularization = (l * 0.5) * np.power(X,2).sum() + np.power(Theta,2).sum()\n\n\t\tJ = test# + regularization\n\n\t\treturn J",
"def eval_cost(self, params, **kwargs):\n raise NotImplementedError",
"def get_cost_updates(self):\n\n y = self.get_hidden_values()\n z = self.get_reconstructed_input(y)\n\n L = T.sum((self.x-z)**2, axis=1)\n\n cost = T.mean(L)\n\n return cost",
"def costFun(self, x):\n\ttmp = x.reshape(self.inp_shape)\n\tc = np.float64(self.calcCost(np.asarray(tmp,dtype=np.float32))) + self.alpha * np.dot(x.T, x)\n\treturn c",
"def cofiCostFunc(params, Y, R, num_users, num_movies, num_features, lbd):\n X = np.reshape(params[:num_movies*num_features], (num_movies, num_features))\n Theta = np.reshape(params[num_movies*num_features:], (num_users, num_features))\n\n # J=sum((X*Theta'-Y)^2) where R[i,j]==1\n h = X.dot(Theta.T)-Y\n M = h**2\n J = (M*R).sum()/2\n reg = lbd/2*((X**2).sum()+(Theta**2).sum())\n J = J+reg\n\n X_grad = (h*R).dot(Theta)+lbd*X\n Theta_grad = (h*R).T.dot(X)+lbd*Theta\n\n grad = np.r_[X_grad.flatten(), Theta_grad.flatten()]\n return J, grad",
"def calculate_cost(theta_values, data):\n population = data[:,0]\n prices = data[:,1]\n total_error = 0\n for i in range(0,len(population)):\n x = array([[1],[population[i]]])\n hypothesis = theta_values.dot(x).flatten() \n squared_error = (hypothesis - prices[i])**2\n total_error += squared_error\n return .5*total_error/len(population) #division by m is just a scaling factor since we're only interested in whether this function is minimized",
"def calcCostFun(self):\n\n self.start()\n F, K = self.model()\n \n return self.costFunction",
"def cost_function(self, config_samples):\n cost = self.work_tracker(config_samples)\n return cost",
"def cost_function(param, Y, R, n_features):\r\n # theta (user, feature), (943, 10): user preference\r\n # X (movie, feature), (1682, 10): movie features\r\n n_movie, n_user = Y.shape\r\n X, theta = deserialize(param, n_movie, n_user, n_features)\r\n\r\n inner = np.multiply(X @ theta.T - Y, R)\r\n\r\n return np.power(inner, 2).sum() / 2",
"def compute_cost(AL, Y):\n pass",
"def _cost_method(self, *args, **kwargs):\n\n cost_val = 0.5 * np.linalg.norm(self.obs_data - self.op(args[0])) ** 2\n\n if 'verbose' in kwargs and kwargs['verbose']:\n print(' - DATA FIDELITY (X):', cost_val)\n\n return cost_val",
"def costFun(self, S, x):",
"def cofiCostFunc(params, Y, R, num_users, num_movies, num_features, reg_lambda, returnCostOnly=False,\n returnGradOnly=False):\n\n # Unfold the U and W matrices from params\n X = params[0:num_movies * num_features].reshape((num_movies, num_features))\n Theta = params[num_movies * num_features:].reshape((num_users, num_features))\n\n errors = (X.dot(Theta.T) - Y) * R\n J = 1 / 2 * np.sum(np.sum(errors ** 2))\n\n penalty = (reg_lambda / 2) * (np.sum(np.sum(Theta ** 2)) + np.sum(np.sum(X ** 2)))\n J = J + penalty\n\n X_grad = errors.dot(Theta) + reg_lambda * X\n Theta_grad = errors.T.dot(X) + reg_lambda * Theta\n\n grad = np.r_[X_grad.flatten(), Theta_grad.flatten()]\n\n if returnGradOnly:\n return grad.flatten()\n if returnCostOnly:\n return J\n\n return J, grad",
"def total_cost(self, X, Y, thetas = None):\n \n if thetas == None:\n thetas = self.thetas\n \n J = 0.0\n m = X.shape[0]\n for x, true_indx in zip(X, Y):\n y = np.zeros(self.noutputs)\n y[true_indx] = 1.\n h_theta = self._forward_prop(x, thetas)[-1]\n J += self.cost(h_theta, y)\n \n return np.sum(J)/m",
"def test_gradient(gradient, thetas, activations_neural, classification_matrix, lambda_value=1, step=1E-4, tolerance=1E-4):\n \n dimensional_error(thetas[-1].shape, gradient[-1].shape)\n\n last_thetas = thetas[-1]\n \n last_thetas_plus_step = thetas[-1] + step\n last_thetas_minus_step = thetas[-1] - step\n\n num_grad_total = pd.DataFrame()\n\n for i in range( gradient[-1].shape[0] ):\n\n\n last_thetas_plus = pd.concat( [last_thetas[0:i], last_thetas_plus_step[i:i+1] , last_thetas[i+1:]] , axis=0 )\n\n last_thetas_minus = pd.concat( [last_thetas[0:i], last_thetas_minus_step[i:i+1], last_thetas[i+1:]] , axis=0 )\n\n last_activation_plus = activation_values(activations_neural[-2], last_thetas_plus ).to_numpy()\n last_activation_minus = activation_values(activations_neural[-2], last_thetas_minus).to_numpy()\n\n cost_plus = cost_function_sigmoid([last_activation_plus] , classification_matrix, [last_thetas_plus] , lambda_value)\n cost_minus = cost_function_sigmoid([last_activation_minus], classification_matrix, [last_thetas_minus], lambda_value)\n\n num_grad = (cost_plus - cost_minus)/(2*step) # it's a column DataFrame\n num_grad_total = pd.concat([num_grad_total, num_grad], axis=1)\n\n num_grad_total = num_grad_total.T\n\n dimensional_error(num_grad_total.shape, gradient[-1].shape)\n\n num_grad_total.index = gradient[-1].index\n num_grad_total.columns = gradient[-1].columns\n\n _ = ( np.abs( gradient[-1].to_numpy() - num_grad_total.to_numpy() ) <= tolerance )\n\n return _, num_grad_total",
"def build_cost_fn_and_opt(lstm_outputs, labels_, learning_rate):\n predictions = tf.contrib.layers.fully_connected(lstm_outputs[:, -1], 1, activation_fn=tf.sigmoid)\n loss = tf.losses.mean_squared_error(labels_, predictions)\n optimzer = tf.train.AdadeltaOptimizer(learning_rate).minimize(loss)\n \n return predictions, loss, optimzer",
"def individual_cost_function(gp, output_trajectory, output_times):\r\n # GET RIGHT PART OF ARRAY\r\n # REFORMAT\r\n # NOISE DATA\r\n # PREDICT NEW VALUES\r\n # GET COST.\r\n X_reshaped = output_times[:,None]\r\n # X_list = GPy_reformat_3D(output_times)\r\n # Y_list = GPy_reformat_3D(output_trajectory)\r\n\r\n # X_list = np.concatenate((X_reshaped,X_reshaped,X_reshaped), axis=1)\r\n X_list = X_reshaped\r\n array1 = output_trajectory.T[:, 0, None]\r\n array2 = output_trajectory.T[:, 1, None]\r\n array3 = output_trajectory.T[:, 2, None]\r\n Y_list = np.concatenate((array1,array2,array3),axis=1)\r\n Y_list = array1\r\n X_list = np.concatenate((X_reshaped,np.zeros_like(X_reshaped)),axis=1)\r\n\r\n\r\n Times_pred_1 = np.concatenate((X_reshaped, np.ones_like(X_reshaped)-1), axis=1)\r\n noise_dict1 = {'output_index': Times_pred_1[:, 1:].astype(int)}\r\n Xpred, Xvar = gp.predict(Times_pred_1,Y_metadata=noise_dict1)\r\n\r\n Times_pred_2 = np.concatenate((X_reshaped, np.ones_like(X_reshaped)), axis=1)\r\n noise_dict2 = {'output_index': Times_pred_2[:, 1:].astype(int)}\r\n Ypred, Yvar = gp.predict(Times_pred_2,Y_metadata=noise_dict2)\r\n\r\n Times_pred_3 = np.concatenate((X_reshaped, np.ones_like(X_reshaped)+1), axis=1)\r\n noise_dict3 = {'output_index': Times_pred_3[:, 1:].astype(int)}\r\n Zpred, Zvar = gp.predict(Times_pred_3,Y_metadata=noise_dict3)\r\n\r\n return gp.log_predictive_density(X_list,Y_list) # ,Y_metadata=noise_dict1) # ARRAY OF ROW INDICES, ARRAY OF COLUMN INDICES, COST\r",
"def getCostFunction(self, evalpts, observations, sigma=None, metric=lambda x: sum(x*x)):\n #XXX: better interface for sigma?\n def _(params):\n ind = 0\n for F, n, ofilt, icheck in zip(self._forwardFactories, self._inputs, \\\n self._outputFilters, self._inputCheckers):\n # check input #XXX: is this worthwile to do?\n my_params = params[ind:ind+n]\n checkQ = icheck(my_params, evalpts)\n if checkQ is not None:\n # some parameters are out of range... returns \"cost\"\n return checkQ\n\n Gm = F(params[ind:ind+n])\n if ind == 0:\n x = ofilt(Gm(evalpts)) \n else:\n x = x + ofilt(Gm(evalpts)) \n ind = ind+n\n if sigma is None:\n x = x - observations\n else:\n x = (x - observations) / sigma\n #return sum(real((conjugate(x)*x)))\n #return sum(x*x) \n return metric(x)\n return _",
"def cost_function(x, N, w, dt):\n yh = np.abs(fftkernel(x, w / dt)) # density\n # formula for density\n C = np.sum(yh ** 2) * dt - 2 * np.sum(yh * x) * \\\n dt + 2 / np.sqrt(2 * np.pi) / w / N\n C = C * N * N\n # formula for rate\n # C = dt*sum( yh.^2 - 2*yh.*y_hist + 2/sqrt(2*pi)/w*y_hist )\n return C, yh",
"def return_terminal_cost_func(TerminalCost='Minimize final angle',\n ReturnGradientAndHessian=False):\n if type(TerminalCost)==str:\n assert TerminalCost in ['Minimize final angle from target angle',\n 'Minimize final angular velocity from target angular velocity'],\\\n \"TerminalCost must be either 'Minimize final angle from target angle' (Default), 'Minimize final angular velocity from target angular velocity'.\"\n else:\n assert type(TerminalCost)==list, \"TerminalCost must be a list of cost types.\"\n for el in TerminalCost:\n assert type(el)==str, \"Each element of TerminalCost must be a string. Not \" + str(type(el)) + \".\"\n assert el in ['Minimize final angle from target angle',\n 'Minimize final angular velocity from target angular velocity'],\\\n \"Each element of TerminalCost must be either 'Minimize final angle from target angle' (Default), 'Minimize final angular velocity from target angular velocity'. '\" + el + \"' not accepted.\"\n\n if \"Minimize final angle from target angle\" in TerminalCost:\n result1 = lambda X,U,dt: k4*(1/2)*(X[0,-1]-TargetAngle)**2\n result1_grad = lambda X,U,dt:\\\n np.matrix([[k4*(X[0,-1]-TargetAngle)],[0]])\n result1_hess = lambda X,U,dt: np.matrix([[k4*1,0],[0,0]])\n else:\n result1 = lambda X,U,dt: 0\n result1_grad = lambda X,U,dt:\\\n np.matrix([[0],[0]])\n result1_hess = lambda X,U,dt: np.matrix([[0,0],[0,0]])\n\n if \"Minimize final angular velocity from target angular velocity\" in TerminalCost:\n result2 = lambda X,U,dt: k5*(1/2)*(X[1,-1]-TargetAngularVelocity)**2\n result2_grad = lambda X,U,dt:\\\n np.matrix([[0],[k5*(X[1,-1]-TargetAngularVelocity)]])\n result2_hess = lambda X,U,dt: np.matrix([[0,0],[0,k5*1]])\n else:\n result2 = lambda X,U,dt: 0\n result2_grad = lambda X,U,dt:\\\n np.matrix([[0],[0]])\n result2_hess = lambda X,U,dt: np.matrix([[0,0],[0,0]])\n\n result = lambda X,U,dt: result1(X,U,dt) \\\n + result2(X,U,dt)\n if ReturnGradientAndHessian:\n result_grad = lambda X,U,dt: result1_grad(X,U,dt) \\\n + result2_grad(X,U,dt)\n result_hess = lambda X,U,dt: result1_hess(X,U,dt) \\\n + result2_hess(X,U,dt)\n return(result,result_grad,result_hess)\n else:\n return(result)",
"def evaluate_trajs(cost, states, controls):\n N = states.shape[0]\n T = controls.shape[1]\n costs = np.zeros(N)\n for i, (x, u) in enumerate(zip(states, controls)):\n for t in range(T):\n costs[i] += cost.stage_cost(x[t],u[t])\n costs[i] += cost.terminal_cost(x[T])\n \n return costs",
"def compute_cost(AL, Y):\n pass",
"def gradient_descent(features, values, theta, alpha, num_iterations):\r\n\r\n m = len(values)\r\n cost_history = []\r\n\r\n for i in range (num_iterations):\r\n \r\n h = numpy.dot(features, theta)\r\n \r\n theta = theta - alpha / m * numpy.dot((h-values),features)\r\n \r\n cost = compute_cost(features, values, theta)\r\n \r\n cost_history.append(cost)\r\n\r\n return theta, pandas.Series(cost_history) # leave this line for the grader\r",
"def return_running_cost_func(RunningCost='Minimize Input Energy'):\n if type(RunningCost)==str:\n assert RunningCost in ['Minimize Input Energy',\n 'Minimize time away from target angle',\n 'Minimize time away from target angular velocity'],\\\n \"RunningCost must be either 'Minimize Input Energy','Minimize time away from target angle', or 'Minimize time away from target angular velocity'.\"\n else:\n assert type(RunningCost)==list, \"RunningCost must be a list of cost types.\"\n for el in RunningCost:\n assert type(el)==str, \"Each element of RunningCost must be a string. Not \" + str(type(el)) + \".\"\n assert el in ['Minimize Input Energy',\n 'Minimize time away from target angle',\n 'Minimize time away from target angular velocity'],\\\n \"Each element of RunningCost must be either 'Minimize Input Energy','Minimize time away from target angle', or 'Minimize time away from target angular velocity'. '\" + el + \"' not accepted.\"\n\n if \"Minimize Input Energy\" in RunningCost:\n result1 = lambda X,U,dt: np.trapz((k3/2)*U**2,dx=dt)\n else:\n result1 = lambda X,U,dt: 0\n\n if \"Minimize time away from target angle\" in RunningCost:\n result2 = lambda X,U,dt: np.trapz(k1*(1/2)*(X[0,1:]-TargetAngle)**2,dx=dt)\n else:\n result2 = lambda X,U,dt: 0\n\n if \"Minimize time away from target angular velocity\" in RunningCost:\n result3 = lambda X,U,dt:\\\n np.trapz(k2*(1/2)*(X[1,1:]-TargetAngularVelocity)**2,dx=dt)\n else:\n result3 = lambda X,U,dt: 0\n\n result = lambda X,U,dt: result1(X,U,dt) \\\n + result2(X,U,dt) \\\n + result3(X,U,dt)\n return(result)",
"def getVectorCostFunction(self, evalpts, observations):\n def _(params):\n forward = self.getForwardEvaluator(evalpts)\n return sum(forward(params) - observations)\n return _"
] | [
"0.72324276",
"0.7181755",
"0.71723294",
"0.69701",
"0.69247854",
"0.6471834",
"0.6227543",
"0.6182345",
"0.617613",
"0.6167203",
"0.6144014",
"0.6132684",
"0.61300564",
"0.6123157",
"0.6107986",
"0.6094501",
"0.60778195",
"0.6051131",
"0.60425967",
"0.60174876",
"0.60173744",
"0.59889996",
"0.59743243",
"0.59706753",
"0.5959947",
"0.59376675",
"0.5933045",
"0.59320205",
"0.5924735",
"0.5899899"
] | 0.74634284 | 0 |
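Note that the positive compute_cost above returns the raw sum of squared errors, while the negatives in this record divide by 2m. A sketch of that scaled convention with a worked zero-cost check; the example data are made up:

    import numpy as np

    def compute_cost_scaled(features, values, theta):
        # J(theta) = sum of squared errors / (2 * m), the convention used by
        # the gradient-descent negatives in this record.
        m = len(values)
        errors = features.dot(theta) - values
        return errors.dot(errors) / (2 * m)

    # Worked check: a perfect linear fit y = 1 + 2x must give zero cost.
    X = np.array([[1.0, 0.0], [1.0, 1.0], [1.0, 2.0]])
    y = np.array([1.0, 3.0, 5.0])
    assert np.isclose(compute_cost_scaled(X, y, np.array([1.0, 2.0])), 0.0)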
Calculate y_j = r_j + gamma * max_a' Q(s', a'), or y_j = r_j for a terminating state. This is the target value used to train the neural network; it uses the target network to make the predictions. | def get_target(self, batch):
# initialise array to store yj values
target = np.zeros((len(batch[0]), self.num_actions))
# loop over samples in the minibatch
for j in range(len(batch[0])):
a0_i = self.action_str2idx(batch[1][j])
r0 = batch[2][j]
done = batch[3][j]
s1 = batch[4][j]
# if terminating state
if done:
target[j, a0_i] = r0
else:
qs_target = self.target_Qmodel.predict(s1)
target[j, a0_i] = r0 + self.gamma * np.max(qs_target)
return target | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def Q_net(self, state):\n\t\tif not self._prediction_made: \n\t\t\tQ = tf.matmul(tf.nn.relu( tf.matmul(state, self.weights_hidden) + self.bias_hidden ), self.weights_out) + self.bias_out \n\t\t\tself._Qval = Q\t\n\t\t\tself._prediction_made = True\n\t\treturn self._Qval",
"def final_result(self, board):\n if board.myMarbles>board.opMarbles:\n reward = self.win_value + self.myMarbles - self.opMarbles\n elif board.myMarbles == board.opMarbles:\n reward = self.draw_value\n else:\n reward = self.loss_value + self.myMarbles-self.opMarbles\n self.game_counter += 1\n self.add_game_to_replay_buffer(reward)\n\n # If we are in training mode we run the optimizer.\n if self.training and (self.game_counter > self.pre_training_games):\n\n batch_third = self.batch_size // 3\n train_batch = self.replay_buffer_win.sample(batch_third)\n train_batch.extend(self.replay_buffer_loss.sample(batch_third))\n train_batch.extend(self.replay_buffer_draw.sample(batch_third))\n train_batch = np.array(train_batch)\n\n #\n # Let's compute the target q values for all non terminal move\n # We extract the resulting state, run it through the target net work and\n # get the maximum q value (of all valid moves)\n next_states = [s[2] for s in train_batch if s[2] is not None]\n # print('current board\\n', board)\n # print('next_states', next_states)\n target_qs = []\n\n if len(next_states) > 0:\n firstInput = [self.board_state_to_nn_input(s) for s in next_states]\n # print(firstInput)\n firstInput = np.asarray(firstInput).reshape(20, 1,2,6)[0]\n # print(firstInput.shape)\n # for i in next_states:\n # print(i[0])\n # print(i[1])\n # input()\n probs, qvals = self.get_valid_probs(firstInput,\n self.target_net, [Board(s[0], s[1]) for s in next_states], True)\n # print(probs)\n # print(qvals)\n # input()\n probs=probs[0]\n qvals=qvals[0]\n # print(qvals)\n i = 0\n for t in train_batch:\n if t[2] is not None:\n # print(t[2])\n # print(probs)\n # input()\n max_move = np.argmax(probs)\n max_qval = qvals[max_move]\n target_qs.append(max_qval * self.reward_discount)\n i += 1\n else:\n target_qs.append(t[3])\n\n if i != len(next_states):\n (\"Something wrong here!!!\")\n else:\n target_qs.extend(train_batch[:, 6])\n\n # We convert the input states we have recorded to feature vectors to feed into the training.\n nn_input = [self.board_state_to_nn_input(x[0]) for x in train_batch]\n actions = train_batch[:, 1]\n\n # We run the training step with the recorded inputs and new Q value targets.\n # print(self.q_net.merge.shape)\n # print(self.q_net.train_step.shape)\n # print(np.asarray([self.q_net.merge, self.q_net.train_step]).shape)\n # print(self.q_net.input_positions.shape)\n # print(nn_input.shape)\n # print(self.q_net.target_q.shape)\n # print(target_qs.shape)\n # print(self.q_net.actions.shape)\n # print(actions.shape)\n # print(type(nn_input))\n summary, _ = TFSN.get_session().run([self.q_net.merge, self.q_net.train_step],\n feed_dict={self.q_net.input_positions: np.asarray(nn_input).reshape(20,1,2,6),\n self.q_net.target_q: target_qs,\n self.q_net.actions: actions})\n self.random_move_prob *= self.random_move_decrease\n\n if self.writer is not None:\n self.writer.add_summary(summary, self.game_counter)\n summary = tf.Summary(value=[tf.Summary.Value(tag='Random_Move_Probability',\n simple_value=self.random_move_prob)])\n self.writer.add_summary(summary, self.game_counter)\n\n TFSN.get_session().run(self.graph_copy_op)",
"def train(network_def, target_params, optimizer, states, actions, next_states, rewards,\n terminals, loss_weights, cumulative_gamma, target_opt, mse_inf,tau,alpha,clip_value_min, rng):\n online_params = optimizer.target\n def loss_fn(params, rng_input, target, loss_multipliers):\n def q_online(state):\n return network_def.apply(params, state, rng=rng_input)\n\n q_values = jax.vmap(q_online)(states).q_values\n q_values = jnp.squeeze(q_values)\n replay_chosen_q = jax.vmap(lambda x, y: x[y])(q_values, actions)\n \n if mse_inf:\n loss = jax.vmap(mse_loss)(target, replay_chosen_q)\n else:\n loss = jax.vmap(dqn_agent.huber_loss)(target, replay_chosen_q)\n\n mean_loss = jnp.mean(loss_multipliers * loss)\n return mean_loss, loss\n\n rng, rng2, rng3, rng4 = jax.random.split(rng, 4)\n\n def q_target(state):\n return network_def.apply(target_params, state, rng=rng2)\n\n def q_target_online(state):\n return network_def.apply(online_params, state, rng=rng4)\n\n if target_opt == 0:\n target = dqn_agent.target_q(q_target, next_states, rewards, terminals, cumulative_gamma) \n elif target_opt == 1:\n #Double DQN\n target = target_DDQN(q_target_online, q_target, next_states, rewards, terminals, cumulative_gamma)\n\n elif target_opt == 2:\n #Munchausen\n target = target_m_dqn(q_target_online, q_target, states,next_states,actions,rewards,terminals,\n cumulative_gamma,tau,alpha,clip_value_min)\n else:\n print('error')\n\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n (mean_loss, loss), grad = grad_fn(online_params, rng3, target, loss_weights)\n optimizer = optimizer.apply_gradient(grad)\n return optimizer, loss, mean_loss",
"def evaluate(self):\n # initialize delta_weights\n Loss = 0\n for i, x_test in enumerate(self.X_test):\n Loss += (self.sigmoid(np.dot(self.weights,x_test))-self.y_test[i])**2\n return Loss",
"def evaluate(self, state, epsilon=1e-6, reparam=False):\n\n action_probs = self.forward(state)\n action_pd = GumbelSoftmax(probs=action_probs, temperature=0.9)\n actions = action_pd.rsample() if reparam else action_pd.sample()\n log_probs = action_pd.log_prob(actions)\n return actions, log_probs, None, None, None",
"def predict(self, state):\n if self.phase is None or self.device is None:\n raise AttributeError('Phase, device attributes have to be set!')\n if self.phase == 'train' and self.epsilon is None:\n raise AttributeError('Epsilon attribute has to be set in training phase')\n\n if self.reach_destination(state):\n return ActionXY(0, 0) if self.kinematics == 'holonomic' else ActionRot(0, 0)\n if self.action_space is None:\n self.build_action_space(state.robot_state.v_pref)\n\n probability = np.random.random()\n if self.phase == 'train' and probability < self.epsilon:\n max_action = self.action_space[np.random.choice(len(self.action_space))]\n else:\n max_action = None\n max_value = float('-inf')\n max_traj = None\n\n if self.do_action_clip:\n state_tensor = state.to_tensor(add_batch_size=True, device=self.device)\n action_space_clipped = self.action_clip(state_tensor, self.action_space, self.planning_width)\n else:\n action_space_clipped = self.action_space\n\n for action in action_space_clipped:\n state_tensor = state.to_tensor(add_batch_size=True, device=self.device)\n next_state = self.state_predictor(state_tensor, action)\n max_next_return, max_next_traj = self.V_planning(next_state, self.planning_depth, self.planning_width)\n reward_est = self.estimate_reward(state, action)\n value = reward_est + self.get_normalized_gamma() * max_next_return\n if value > max_value:\n max_value = value\n max_action = action\n max_traj = [(state_tensor, action, reward_est)] + max_next_traj\n if max_action is None:\n raise ValueError('Value network is not well trained.')\n\n if self.phase == 'train':\n self.last_state = self.transform(state)\n else:\n self.traj = max_traj\n\n return max_action",
"def valueIteration(P,R,gamma,theta,initial_v,max_iter=1e8):\n print('Running value iteration ...')\n\n def one_step_lookahead(s, V):\n \"\"\"\n :param state: current state\n :param v: current value estimator\n :return: A, list of optimal action values under current value estimator\n \"\"\"\n num_a = num_actions\n num_S = num_states\n\n A = np.zeros(num_a)\n\n for a in range(num_a):\n for s_prime in range(num_S):\n A[a] += P[s, a, s_prime] * (R[s, a, s_prime] + gamma * V[s_prime])\n return A\n \n # initialization\n v = initial_v \n num_states, num_actions = P.shape[:2]\n k = 0 \n best_actions = [0] * num_states\n delta = 1000\n\n while delta > theta and k <= max_iter:\n delta = 0\n k += 1\n for s in range(num_states):\n action_values = one_step_lookahead(s, v)\n best_action_value = np.max(action_values)\n delta = max(delta, np.abs(best_action_value - v[s]))\n v[s] = best_action_value\n print(delta)\n\n for s in range(num_states):\n A = one_step_lookahead(s, v)\n best_actions[s] = np.argmax(A)\n\n\n print('number of iterations:', k)\n return best_actions, v",
"def learn(self, batch_size, gamma, state_number, priority_scale=1.0):\n if self.use_per:\n (states, actions, rewards, new_states,\n terminal_flags), importance, indices = self.replay_buffer.get_minibatch(batch_size=self.batch_size,\n priority_scale=priority_scale)\n importance = importance ** (1 - self.calc_epsilon(state_number))\n else:\n states, actions, rewards, new_states, terminal_flags = self.replay_buffer.get_minibatch(\n batch_size=self.batch_size, priority_scale=priority_scale)\n\n # Target DQN estimates q-vals for new states\n result_ids = []\n for state in new_states:\n result_ids.append(self.target_dqn.predict.remote(np.expand_dims(state, axis=0)))\n\n results = ray.get(result_ids)\n target_future_v = np.amax(np.array(results).squeeze(), axis=1)\n\n # Calculate targets (bellman equation)\n target_q = rewards + (gamma * target_future_v * (1 - terminal_flags))\n\n # Use targets to calculate loss (and use loss to calculate gradients)\n with tf.GradientTape() as tape:\n trainable_variables = ray.get(self.dqn.trainable_variables.remote())\n tape.watch(trainable_variables)\n\n predict_ids = []\n for state in states:\n predict_ids.append(self.dqn.call.remote(np.expand_dims(state, axis=0)))\n\n q_values = tf.squeeze(tf.stack(ray.get(predict_ids)))\n\n one_hot_actions = tf.keras.utils.to_categorical(actions, self.n_actions,\n dtype=np.float32) # using tf.one_hot causes strange errors\n Q = tf.reduce_sum(tf.multiply(q_values, one_hot_actions), axis=1)\n\n error = Q - target_q\n loss = tf.keras.losses.Huber(delta=1.35)(target_q, Q)\n\n if self.use_per:\n # Multiply the loss by importance, so that the gradient is also scaled.\n # The importance scale reduces bias against situataions that are sampled\n # more frequently.\n loss = tf.reduce_mean(loss * importance)\n\n model_gradients = tape.gradient(loss, trainable_variables)\n self.dqn.apply_gradients.remote(model_gradients, trainable_variables)\n\n if self.use_per:\n self.replay_buffer.set_priorities(indices, error)\n\n return float(loss.numpy()), error",
"def __call__(self, y_true: np.ndarray, y_pred: np.ndarray) -> float:",
"def qlearning(env, iterations=1000, gamma=0.9, alpha=0.1):\n nS = env.nS # number of states\n nA = env.nA # number of actions\n Q_value = np.zeros((nS, nA))\n policy = np.ones((env.nS,env.nA))/env.nA\n epsilon = 1\n s_t1 = env.reset() # reset the environment and place the agent in the start square\n ############################\n # YOUR IMPLEMENTATION HERE #\n # HINT: Don't forget to decay epsilon according to GLIE\n\n curr_state = s_t1\n \n start = time.time() # to time how long convergence takes\n print(\"---Q Learning---\\nTraining Started.\")\n \n for k in range (1, iterations):\n # if (k%10000) == 0:\n # print(\"Now playing iteration: \", k)\n epsilon = 1/k\n curr_action, reward, new_state, done = take_one_step(env, policy, curr_state)\n new_action = sample_action(policy, new_state)\n Q_value[curr_state, curr_action] = Q_value[curr_state, curr_action] + alpha * (reward + gamma * (Q_value[new_state, np.argmax(Q_value[new_state])]) - Q_value[curr_state, curr_action])\n \n # epsilon-greedy policy update\n Q_list = np.argwhere(Q_value[curr_state] == np.amax(Q_value[curr_state])).flatten() # get a list of all indices where Q is maximum, (argmax(Q))\n max_Q = np.random.choice(Q_list.flatten()) # randomly pick from those indices. Picking each index is equally likely.\n for a in range (nA):\n if a == max_Q:\n policy[curr_state][a] = epsilon/nA + (1 - epsilon) # for the chosen maximal index of Q, set the policy to epsilon/m + 1 - epsilon\n else:\n policy[curr_state][a] = epsilon/nA \n \n # print(\"Q_value = {0}\".format(Q_value))\n # print(\"policy = {0}\".format(policy))\n \n if done:\n curr_state = env.reset() # reset the environment and place the agent in the start square\n curr_action = sample_action(policy, curr_state)\n else:\n curr_state = new_state\n curr_action = new_action\n \n stop = time.time()\n print(\"Training Completed.\")\n print(\"It took: {0} iterations and {1} minutes\".format(k,(stop-start)/60))\n \n ############################\n det_policy = np.argmax(Q_value, axis=1)\n return Q_value, det_policy",
"def evaluate(self):\n RV = -self.predict()\n RV += self.Ystar()\n return RV",
"def target_m_dqn(model, target_network, states, next_states, actions,rewards, terminals, \n cumulative_gamma,tau,alpha,clip_value_min):\n \n #----------------------------------------\n q_state_values = jax.vmap(target_network, in_axes=(0))(states).q_values\n q_state_values = jnp.squeeze(q_state_values)\n \n next_q_values = jax.vmap(target_network, in_axes=(0))(next_states).q_values\n next_q_values = jnp.squeeze(next_q_values)\n #----------------------------------------\n\n tau_log_pi_next = stable_scaled_log_softmax(next_q_values, tau, axis=1)\n pi_target = stable_softmax(next_q_values,tau, axis=1)\n replay_log_policy = stable_scaled_log_softmax(q_state_values, tau, axis=1)\n\n #----------------------------------------\n \n replay_next_qt_softmax = jnp.sum((next_q_values-tau_log_pi_next)*pi_target,axis=1)\n\n replay_action_one_hot = nn.one_hot(actions, q_state_values.shape[-1])\n tau_log_pi_a = jnp.sum(replay_log_policy * replay_action_one_hot, axis=1)\n\n #a_max=1\n tau_log_pi_a = jnp.clip(tau_log_pi_a, a_min=clip_value_min,a_max=1)\n\n munchausen_term = alpha * tau_log_pi_a\n modified_bellman = (rewards + munchausen_term +cumulative_gamma * replay_next_qt_softmax *\n (1. - jnp.float32(terminals)))\n \n return jax.lax.stop_gradient(modified_bellman)",
"def jval(self):\n return self.q * self.model.nobs_moms",
"def train_replay(self):\n\n if len(self.memory) < self.train_start:\n return\n\n if self.epsilon > self.epsilon_end:\n self.epsilon -= self.epsilon_decay_step\n\n mini_batch = random.sample(self.memory, self.batch_size)\n\n history = np.zeros((self.batch_size, self.state_size[0],\n self.state_size[1], self.state_size[2]))\n next_history = np.zeros((self.batch_size, self.state_size[0],\n self.state_size[1], self.state_size[2]))\n\n # Initialize the Value targets to optimize\n v_target = np.zeros((self.batch_size,))\n\n action, reward, dead = [], [], []\n\n for i in range(self.batch_size):\n history[i] = np.float32(mini_batch[i][0] / 255.)\n next_history[i] = np.float32(mini_batch[i][3] / 255.)\n action.append(mini_batch[i][1])\n reward.append(mini_batch[i][2])\n dead.append(mini_batch[i][4])\n\n # current state-action values Q(st, at)\n q_outputs = self.q_duelling_part.predict(history)\n\n # TD-values for updating the networks coming from the target model\n if self.target_model is True:\n v_target_value = self.target_v_duelling_part.predict(next_history)\n elif self.target_model is False:\n v_target_value = self.v_duelling_part.predict(next_history)\n\n q_targets = []\n\n for i in range(self.batch_size):\n if dead[i]:\n v_target[i] = reward[i]\n q_outputs[i][action[i]] = reward[i]\n\n else:\n v_target[i] = reward[i] + \\\n self.discount_factor * v_target_value[i]\n q_outputs[i][action[i]] = reward[i] + \\\n self.discount_factor * v_target_value[i]\n\n q_targets.append(q_outputs[i][action[i]])\n\n self.optimizer([history, action, q_targets]) # optimize the state-action-value head\n self.v_duelling_part.fit(history, v_target, epochs=1, verbose=0) # optimize the state-value head",
"def _compute_q_argmax(self):\n self.cur_head = self._sess.run(self.ucb_net._P_argmax,\n {self.state_ph: self.state,\n self.ucb_A_ph: self.ucb_A,\n self.ucb_b_ph: self.ucb_b})[0]\n x = self._sess.run(self._net_outputs.q_heads,\n {self.state_ph: self.state})\n return np.argmax(x[:,:,self.cur_head], axis=1)[0]",
"def learn(self):\r\n \r\n # take a mini-batch from replay experience\r\n cur_batch_size = min(len(self.replay_exp), self.batch_size)\r\n mini_batch = random.sample(self.replay_exp, cur_batch_size)\r\n \r\n # batch data\r\n sample_states = np.ndarray(shape = (cur_batch_size, self.state_size)) # replace 128 with cur_batch_size\r\n sample_actions = np.ndarray(shape = (cur_batch_size, 1))\r\n sample_rewards = np.ndarray(shape = (cur_batch_size, 1))\r\n sample_next_states = np.ndarray(shape = (cur_batch_size, self.state_size))\r\n sample_dones = np.ndarray(shape = (cur_batch_size, 1))\r\n\r\n temp=0\r\n for exp in mini_batch:\r\n sample_states[temp] = exp[0]\r\n sample_actions[temp] = exp[1]\r\n sample_rewards[temp] = exp[2]\r\n sample_next_states[temp] = exp[3]\r\n sample_dones[temp] = exp[4]\r\n temp += 1\r\n \r\n \r\n sample_qhat_next = self.brain_target.predict(sample_next_states)\r\n \r\n # set all Q values terminal states to 0\r\n sample_qhat_next = sample_qhat_next * (np.ones(shape = sample_dones.shape) - sample_dones)\r\n # choose max action for each state\r\n sample_qhat_next = np.max(sample_qhat_next, axis=1)\r\n \r\n sample_qhat = self.brain_policy.predict(sample_states)\r\n \r\n for i in range(cur_batch_size):\r\n a = sample_actions[i,0]\r\n sample_qhat[i,int(a)] = sample_rewards[i] + self.gamma * sample_qhat_next[i]\r\n \r\n q_target = sample_qhat\r\n \r\n self.brain_policy.fit(sample_states, q_target, epochs = 1, verbose = 0)\r\n \r\n \r\n \r\n \"\"\"\r\n \r\n for state, action, reward, next_state, done in mini_batch:\r\n target_Q_s_a = 0 # new target for Q(s,a)\r\n state = np.reshape(state, [1, state_size])\r\n next_state = np.reshape(next_state, [1, state_size])\r\n \r\n # if it is not the terminal state\r\n if not done:\r\n qhat_next = self.brain_target.predict(next_state) # estimate Q(s',a')\r\n target_Q_s_a = reward + self.gamma * np.amax(qhat_next[0]) # because the output is m * n, so we need to consider the dimension [0]\r\n else:\r\n target_Q_s_a = reward\r\n \r\n target_output = self.brain_policy.predict(state) # we will replace target of Q(s,a) for specific a later\r\n target_output[0][action] = target_Q_s_a # new target for state s and action a\r\n \r\n self.brain_policy.fit(state, target_output, epochs = 1, verbose = 0)\r\n \r\n \"\"\"",
"def train(network_def, target_params, optimizer, states, actions, next_states, rewards,\n terminals, loss_weights, target_opt, num_tau_samples, num_tau_prime_samples,\n num_quantile_samples, cumulative_gamma, double_dqn, kappa, tau,alpha,clip_value_min, num_actions,rng):\n online_params = optimizer.target\n def loss_fn(params, rng_input, target_quantile_vals, loss_multipliers):\n def online(state):\n return network_def.apply(params, state, num_quantiles=num_tau_samples, rng=rng_input)\n\n model_output = jax.vmap(online)(states)\n quantile_values = model_output.quantile_values\n quantiles = model_output.quantiles\n chosen_action_quantile_values = jax.vmap(lambda x, y: x[:, y][:, None])(\n quantile_values, actions)\n # Shape of bellman_erors and huber_loss:\n # batch_size x num_tau_prime_samples x num_tau_samples x 1.\n bellman_errors = (target_quantile_vals[:, :, None, :] -\n chosen_action_quantile_values[:, None, :, :])\n # The huber loss (see Section 2.3 of the paper) is defined via two cases:\n # case_one: |bellman_errors| <= kappa\n # case_two: |bellman_errors| > kappa\n huber_loss_case_one = (\n (jnp.abs(bellman_errors) <= kappa).astype(jnp.float32) *\n 0.5 * bellman_errors ** 2)\n huber_loss_case_two = (\n (jnp.abs(bellman_errors) > kappa).astype(jnp.float32) *\n kappa * (jnp.abs(bellman_errors) - 0.5 * kappa))\n huber_loss = huber_loss_case_one + huber_loss_case_two\n # Tile by num_tau_prime_samples along a new dimension. Shape is now\n # batch_size x num_tau_prime_samples x num_tau_samples x 1.\n # These quantiles will be used for computation of the quantile huber loss\n # below (see section 2.3 of the paper).\n quantiles = jnp.tile(quantiles[:, None, :, :],\n [1, num_tau_prime_samples, 1, 1]).astype(jnp.float32)\n # Shape: batch_size x num_tau_prime_samples x num_tau_samples x 1.\n quantile_huber_loss = (jnp.abs(quantiles - jax.lax.stop_gradient(\n (bellman_errors < 0).astype(jnp.float32))) * huber_loss) / kappa\n # Sum over current quantile value (num_tau_samples) dimension,\n # average over target quantile value (num_tau_prime_samples) dimension.\n # Shape: batch_size x num_tau_prime_samples x 1.\n loss = jnp.sum(quantile_huber_loss, axis=2)\n loss = jnp.squeeze(jnp.mean(loss, axis=1), axis=-1)\n\n mean_loss = jnp.mean(loss_multipliers * loss)\n\n return mean_loss, loss\n\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n\n if target_opt == 0:\n rng, target_quantile_vals = target_quantile_values_fun(\n network_def,\n online_params,\n target_params,\n next_states,\n rewards,\n terminals,\n num_tau_prime_samples,\n num_quantile_samples,\n cumulative_gamma,\n double_dqn,\n rng)\n\n elif target_opt == 1:\n rng, target_quantile_vals = munchau_target_quantile_values_fun(\n network_def,\n online_params,\n target_params,\n states,\n actions,\n next_states,\n rewards,\n terminals,\n num_tau_prime_samples,\n num_quantile_samples,\n cumulative_gamma,\n double_dqn,\n rng,\n tau,\n alpha,\n clip_value_min,\n num_actions\n )\n\n else:\n print('error')\n\n rng, rng_input = jax.random.split(rng)\n (mean_loss, loss), grad = grad_fn(online_params, rng_input, target_quantile_vals, loss_weights)\n optimizer = optimizer.apply_gradient(grad)\n return rng, optimizer, loss, mean_loss",
"def learn(self, state, action, reward, next_state):\r\n\r\n \"\"\"Please Fill Your Code Here.\r\n \"\"\"\r\n self.Q[state][action] = self.Q[state][action] + self.alpha * (reward + self.gamma * max(self.Q[next_state]) - self.Q[state][action])\r\n\r\n return 0",
"def update_predict_network(self):\n states, actions, rewards, new_states, is_terminals = self.memory.sample(self.batch_size)\n\n preprocessed_states, preprocessed_new_states = self.preprocessor.process_batch(states, new_states)\n\n actions = self.preprocessor.process_action(actions)\n # update network\n q_values = self.cal_target_q_values(preprocessed_new_states)\n max_q_values = np.max(q_values, axis=1)\n max_q_values[is_terminals] = 0.0\n targets = rewards + self.gamma * max_q_values\n targets = np.expand_dims(targets, axis=1)\n\n self.q_network.train_on_batch([preprocessed_states, actions], targets)\n if self.num_steps % self.target_update_freq ==0:\n print(\"Update target network at %d steps\" % self.num_steps)\n self.update_target_network()",
"def compute_td_loss(self, states, actions, rewards, next_states, is_done, gamma=0.99):\r\n actions = tf.convert_to_tensor(actions) # shape: [batch_size * seq_len]\r\n rewards = tf.convert_to_tensor(rewards) # shape: [batch_size * seq_len]\r\n is_done = tf.convert_to_tensor(is_done) # shape: [batch_size * seq_len]\r\n\r\n actions = tf.reshape(actions, [-1])\r\n rewards = tf.reshape(rewards, [-1])\r\n is_done = tf.reshape(is_done, [-1])\r\n states = tf.reshape(states, [batch_size * max_seq, 1, 4])\r\n next_states = tf.reshape(next_states, [batch_size * max_seq, 1, 4])\r\n # if self.USE_CUDA:\r\n # actions = actions.cuda()\r\n # rewards = rewards.cuda()\r\n # is_done = is_done.cuda()\r\n\r\n # get q-values for all actions in current states\r\n predicted_qvalues = self.DRQN.model.predict(states, steps=1)\r\n # predicted_qvalues = predicted_qvalues.reshape(-1, self.action_space.n)\r\n # predicted_qvalues = predicted_qvalues.squeeze(0)\r\n\r\n # select q-values for chosen actions\r\n # a = np.concatenate(actions)\r\n\r\n # predicted_qvalues_for_actions = predicted_qvalues[\r\n # range(states.shape[0]), actions\r\n # ]\r\n\r\n # compute q-values for all actions in next states\r\n predicted_next_qvalues = self.DRQN_target.model.predict(next_states, steps=1) # YOUR CODE\r\n # predicted_next_qvalues = predicted_next_qvalues.squeeze(0)\r\n predicted_next_qvalues = predicted_next_qvalues.reshape(-1, self.action_space.n)\r\n\r\n # compute V*(next_states) using predicted next q-values\r\n next_state_values = predicted_next_qvalues.max(-1)\r\n next_state_values_arg = predicted_next_qvalues.argmax(-1)\r\n # compute \"target q-values\" for loss - it's what's inside square parentheses in the above formula.\r\n target_qvalues_for_actions = rewards + gamma * next_state_values\r\n\r\n # at the last state we shall use simplified formula: Q(s,a) = r(s,a) since s' doesn't exist\r\n target_qvalues_for_actions = tf.where(\r\n is_done, rewards, target_qvalues_for_actions)\r\n # if is_done:\r\n # target_qvalues_for_actions = rewards\r\n # else:\r\n # target_qvalues_for_actions = target_qvalues_for_actions\r\n for i in range(len(target_qvalues_for_actions)):\r\n j = next_state_values_arg[i]\r\n predicted_qvalues[i][0][j] = target_qvalues_for_actions[i]\r\n # mean squared error loss to minimize\r\n loss = self.DRQN.train(states, predicted_qvalues)\r\n\r\n return loss",
"def update(Q, target_Q, opt, samples, gamma=0.99, target_type='double_dqn'):\n xp = Q.xp\n obs = xp.asarray([sample[0] for sample in samples], dtype=np.float32)\n action = xp.asarray([sample[1] for sample in samples], dtype=np.int32)\n reward = xp.asarray([sample[2] for sample in samples], dtype=np.float32)\n done = xp.asarray([sample[3] for sample in samples], dtype=np.float32)\n obs_next = xp.asarray([sample[4] for sample in samples], dtype=np.float32)\n # Predicted values: Q(s,a)\n y = F.select_item(Q(obs), action)\n # Target values: r + gamma * max_b Q(s',b)\n with chainer.no_backprop_mode():\n if target_type == 'dqn':\n next_q = F.max(target_Q(obs_next), axis=1)\n elif target_type == 'double_dqn':\n next_q = F.select_item(target_Q(obs_next),\n F.argmax(Q(obs_next), axis=1))\n else:\n raise ValueError('Unsupported target_type: {}'.format(target_type))\n target = reward + gamma * (1 - done) * next_q\n loss = mean_clipped_loss(y, target)\n Q.cleargrads()\n loss.backward()\n opt.update()",
"def _build_target_q_op(self):\n targets = []\n for gamma, target_q in zip(self.gammas,\n self._replay_next_target_net_outputs.q_values):\n # Get the maximum Q-value across the actions dimension.\n replay_next_qt_max = tf.reduce_max(target_q, 1)\n\n # Calculate the Bellman target value.\n # Q_t = R_t + \\gamma^N * Q'_t+1\n # where,\n # Q'_t+1 = \\argmax_a Q(S_t+1, a)\n # (or) 0 if S_t is a terminal state,\n # and\n # N is the update horizon (by default, N=1).\n cumulative_gamma = math.pow(gamma, self.update_horizon)\n n_step_reward = self._build_discounted_n_step_rewards(gamma)\n targets.append(n_step_reward + cumulative_gamma * replay_next_qt_max *\n (1. - tf.cast(self._replay.terminals, tf.float32)))\n return targets",
"def get_action(self, history):\n history = np.float32(history / 255.0)\n if np.random.rand() <= self.epsilon:\n return random.randrange(3)\n\n else:\n q_values = self.q_duelling_part.predict(history)\n\n return np.argmax(q_values[0])",
"def target_DDQN(model, target_network, next_states, rewards, terminals, cumulative_gamma):\n next_q_values = jax.vmap(model, in_axes=(0))(next_states).q_values\n next_q_values = jnp.squeeze(next_q_values)\n replay_next_qt_max = jnp.argmax(next_q_values, axis=1)\n next_q_state_values = jax.vmap(target_network, in_axes=(0))(next_states).q_values\n\n q_values = jnp.squeeze(next_q_state_values)\n replay_chosen_q = jax.vmap(lambda t, u: t[u])(q_values, replay_next_qt_max)\n \n return jax.lax.stop_gradient(rewards + cumulative_gamma * replay_chosen_q *\n (1. - terminals))",
"def result(self):\r\n # TODO: how about xcurrent?\r\n return self.best.get() + (\r\n self.countevals, self.countiter, self.gp.pheno(self.mean), self.gp.scales * self.sigma * self.sigma_vec * self.dC**0.5)",
"def _take_action(self, state):\n feed = {self.inputs_: state.reshape((1, *state.shape))}\n Qs = sess.run(self.output, feed_dict=feed)\n return np.argmax(Qs)",
"def target_quantile_values(network, online_params, target_params, states,\n next_states, rewards, terminals,\n num_tau_prime_samples, num_quantile_samples,\n cumulative_gamma, double_dqn, rng):\n rng, rng1, rng2, rng3 = jax.random.split(rng, num=4)\n curr_state_representation = network.apply(\n target_params, states, num_quantiles=num_quantile_samples,\n rng=rng3).representation\n curr_state_representation = jnp.squeeze(curr_state_representation)\n rewards = jnp.tile(rewards, [num_tau_prime_samples])\n is_terminal_multiplier = 1. - terminals.astype(jnp.float32)\n # Incorporate terminal state to discount factor.\n gamma_with_terminal = cumulative_gamma * is_terminal_multiplier\n gamma_with_terminal = jnp.tile(gamma_with_terminal, [num_tau_prime_samples])\n # Compute Q-values which are used for action selection for the next states\n # in the replay buffer. Compute the argmax over the Q-values.\n if double_dqn:\n outputs_action = network.apply(online_params,\n next_states,\n num_quantiles=num_quantile_samples,\n rng=rng1)\n else:\n outputs_action = network.apply(target_params,\n next_states,\n num_quantiles=num_quantile_samples,\n rng=rng1)\n target_quantile_values_action = outputs_action.quantile_values\n target_q_values = jnp.squeeze(\n jnp.mean(target_quantile_values_action, axis=0))\n # Shape: batch_size.\n next_qt_argmax = jnp.argmax(target_q_values)\n # Get the indices of the maximium Q-value across the action dimension.\n # Shape of next_qt_argmax: (num_tau_prime_samples x batch_size).\n next_state_target_outputs = network.apply(\n target_params,\n next_states,\n num_quantiles=num_tau_prime_samples,\n rng=rng2)\n next_qt_argmax = jnp.tile(next_qt_argmax, [num_tau_prime_samples])\n target_quantile_vals = (\n jax.vmap(lambda x, y: x[y])(next_state_target_outputs.quantile_values,\n next_qt_argmax))\n target_quantile_vals = rewards + gamma_with_terminal * target_quantile_vals\n # We return with an extra dimension, which is expected by train.\n next_state_representation = next_state_target_outputs.representation\n next_state_representation = jnp.squeeze(next_state_representation)\n return (\n rng,\n jax.lax.stop_gradient(target_quantile_vals[:, None]),\n jax.lax.stop_gradient(curr_state_representation),\n jax.lax.stop_gradient(next_state_representation))",
"def loss_function(self, q_vals, next_q_vals, rewards, actions, double_q_vals=None):\n with self.graph.as_default():\n with tf.name_scope('loss'):\n \"\"\"\n Calculate the target value(s)\n \"\"\"\n if double_q_vals is not None:\n # Select maximizing action using online network\n max_index = tf.argmax(double_q_vals, axis=1, output_type=tf.int32)\n indices = tf.stack([tf.range(0,self.batch_size), max_index], axis=-1)\n # Evaluate Q using target network\n next_q_acted = tf.gather_nd(next_q_vals, indices)\n else:\n # Select the maximum value of the next_q_vals: max_a Q(s_t+1,a)\n next_q_acted = tf.reduce_max(next_q_vals, axis=1)\n # y = r + gamma * max Q(s_t+1)\n target = tf.add_n([rewards, tf.scalar_mul(self.gamma, next_q_acted)], name='target_values')\n \"\"\"\n Retrieve the Q-value(s) of the given actions\n \"\"\"\n # Q(s_t,a_t)\n indices = tf.stack([tf.range(0,self.batch_size), actions], axis=-1)\n q_acted = tf.gather_nd(q_vals, indices)\n \"\"\"\n Calculate the loss: squared TD-error\n \"\"\"\n # This is the TD-error: y - Q(s_t,a_t)\n diff = tf.subtract(target, q_acted, name='TD_errors')\n # reduce-mean averages the negative and positive td-errors\n td_loss = tf.square(diff, name='squared_TD_errors')\n loss = tf.reduce_mean(td_loss)\n # Squared_TD_errors is the mean-squared-loss we want to minimize in training\n\n return loss, diff",
"def obtain_training_parameters(para, x, y, alg = 'LR'):\n \n \n global omega\n \n # Iterate to find the optimal parameters\n if alg == 'LR': # logistic regression\n omega = np.zeros((3, 1))\n alpha = para.step_size # step size\n for i in range(para.iteration):\n grad = np.zeros((3, 1))\n for i in range(len(x[:, 0])):\n grad += np.reshape(x[i, :], (3, 1)) * (-y[i] + 1 / (1 + np.exp(-np.dot(x[i, :], omega))))\n omega -= alpha * grad \n \n elif alg == 'GNB': # Gaussian Naive Bayes\n # get counts for each class\n itszero = 0\n itsone = 0\n for i in range(len(y)):\n if y[i] == 1:\n itsone += 1\n else:\n itszero += 1\n \n # probability of see y\n theta0 = itszero / len(y)\n theta1 = 1 - theta0\n \n # mean of omega\n mew00 = 0\n mew01 = 0\n mew02 = 0\n mew10 = 0\n mew11 = 0\n mew12 = 0\n for i in range(len(y)):\n if y[i] == 0:\n mew00 += x[i, 0] / itszero\n mew01 += x[i, 1] / itszero\n mew02 += x[i, 2] / itszero\n else:\n mew10 += x[i, 0] / itsone\n mew11 += x[i, 1] / itsone\n mew12 += x[i, 2] / itsone\n \n # variance of omega \n sigma00 = 0\n sigma01 = 0\n sigma02 = 0\n sigma10 = 0\n sigma11 = 0\n sigma12 = 0\n for i in range(len(y)):\n if y[i] == 0:\n sigma00 += (x[i, 0] - mew00)**2 / itszero\n sigma01 += (x[i, 1] - mew01)**2 / itszero\n sigma02 += (x[i, 2] - mew02)**2 / itszero\n else:\n sigma10 += (x[i, 0] - mew10)**2 / itsone\n sigma11 += (x[i, 1] - mew11)**2 / itsone\n sigma12 += (x[i, 2] - mew12)**2 / itsone\n \n # store these parameters into the name \"omage\"\n omega = [theta0, theta1, mew00, mew01, mew02, mew10, mew11, mew12,\n sigma00, sigma01, sigma02, sigma10, sigma11, sigma12] \n \n else: # Gaussian Mixture\n pass\n \n return omega",
"def value_iteration(self):\n #Create a utility function of the environment shape\n gamma = 0.9\n epsilon = 0.01\n iteration = 0\n\n #create a utility function that matches the size of the number of states\n u = np.zeros(self.env.observation_space.n, dtype=float)\n\n u_copy = u.copy()\n\n #Create the reward grid\n reward = np.array([state_map.get(sublist) for state in frozen_lake.MAPS[self.env.spec._kwargs.get('map_name')] for sublist in state])\n\n T = self.frozen_transition()\n\n graph_list = list()\n\n #keep track of the convergence\n policy_convergence = list()\n\n while True:\n delta = 0\n iteration += 1\n u = u_copy.copy()\n graph_list.append(u)\n start_time = time()\n for s in range(self.env.observation_space.n):\n r = reward[s]\n v = np.zeros((1, self.env.observation_space.n), dtype=float)\n v[0, s] = 1.0\n u_copy[s] = self.return_state_utility(v, T, u, r, gamma)\n delta = max(delta, np.abs(u_copy[s] - u[s]))\n policy_convergence.append({'iter': iteration, 'delta': delta})\n if delta < epsilon * (1 - gamma) / gamma:\n print(\"Total Iterations: {}\".format(iteration))\n print(\"=================== VALUE ITERATION RESULT ==================\")\n print(\"Iterations: \" + str(iteration))\n print(\"Delta: \" + str(delta))\n print(\"Gamma: \" + str(gamma))\n print(\"Epsilon: \" + str(epsilon))\n print(\"Time to converge: {} seconds\".format(time() - start_time))\n print(\"===================================================\")\n utility_reshape = np.reshape(u, (int(np.sqrt(self.env.observation_space.n)), int(np.sqrt(self.env.observation_space.n))))\n print (np.array(utility_reshape, dtype=float))\n print(\"===================================================\")\n break\n\n return u"
] | [
"0.6133086",
"0.59240603",
"0.59210104",
"0.58679605",
"0.586299",
"0.5832981",
"0.58184266",
"0.58139116",
"0.58036226",
"0.5798045",
"0.5787385",
"0.5786582",
"0.5784244",
"0.57797366",
"0.5778133",
"0.57767266",
"0.57760173",
"0.5755751",
"0.57456607",
"0.5729801",
"0.57243717",
"0.5693466",
"0.56906044",
"0.5680105",
"0.567885",
"0.56759185",
"0.56671196",
"0.5666579",
"0.56520826",
"0.5650481"
] | 0.59281206 | 1 |
Small function to build the correct argtypes for the LibXC computers | def _build_comute_argtype(num_nd, num_nd_write):
ret = [_xc_func_p, ctypes.c_size_t]
ret += [_ndptr] * num_nd
ret += [_ndptr_w] * num_nd_write
return tuple(ret) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cmd_type(args):",
"def _cast_types(args):\n\targs.x_val = None if args.x_val == 'None' else int(args.x_val)\n\targs.test_size = float(args.test_size)\n\targs.alpha = float(args.alpha)\n\targs.fit_prior = (args.fit_prior in ['True', \"True\", 'true', \"true\"])\n\n\t# class_prior - array like type (problem to convert)\n\tif args.class_prior == \"None\" or args.class_prior == 'None':\n\t\targs.class_prior = None\n\n\t# --------- #\n\treturn args",
"def set_arg_types( self ):\n if self.mode == 'grad':\n self.function = terms.dw_grad\n use_method_with_name( self, self.get_fargs_grad, 'get_fargs' )\n elif self.mode == 'div':\n self.function = terms.dw_div\n use_method_with_name( self, self.get_fargs_div, 'get_fargs' )\n else:\n self.function = self.d_eval\n use_method_with_name( self, self.get_fargs_eval, 'get_fargs' )\n self.use_caches = {'state_in_volume_qp' : [['parameter_s']],\n 'div_vector' : [['parameter_v']]}",
"def determine_arg_locations(self, arg_types): # pragma: no cover\n raise NotImplementedError(\"Implement this\")",
"def processCmdLineArgs(expectedTypes, usage):\n\targs = []\n\tnumComLineArgs = len(sys.argv)\n\tnumExpected = len(expectedTypes)\n\tif (numComLineArgs - 1 == len(expectedTypes)):\n\t\ttry:\n\t\t\tfor i in range(0, numExpected):\n\t\t\t\tif (expectedTypes[i] == typeInt):\n\t\t\t\t\targs.append(int(sys.argv[i+1]))\n\t\t\t\telif (expectedTypes[i] == typeFloat):\n\t\t\t\t\targs.append(float(sys.argv[i+1]))\n\t\t\t\telif (expectedTypes[i] == typeString):\n\t\t\t\t\targs.append(sys.argv[i+1])\n\t\texcept ValueError:\n\t\t\tprint (\"expected number of command line arguments found but there is type mis match\")\n\t\t\tsys.exit(1)\n\telse:\n\t\tprint (\"expected number of command line arguments not found\")\n\t\tprint (usage)\n\t\tsys.exit(1)\n\treturn args",
"def get_check_types():",
"def generate_python_argument_types(argtypes: Union[List, str], outdir: str, prefix: str = 'sc', types=None):\n if type(argtypes) is str:\n argtypes = json.load(open(argtypes, 'r'))\n if not os.path.exists(outdir):\n os.mkdir(outdir)\n type_to_args = collections.defaultdict(set)\n for arg in argtypes:\n argtype = argtypes[arg]\n if types is not None:\n if argtype not in types:\n continue\n type_to_args[argtype].add(arg)\n for argtype in type_to_args:\n real_args = sorted(list(type_to_args[argtype]))\n arguments_to_python(real_args, argtype, outdir, prefix)",
"def _create_args(self, func_args):\n self.llvm_ret_type = self._from_ctype(self.signature.ret_type)\n self.llvm_arg_types = \\\n [self._from_ctype(a) for a in self.signature.arg_ctypes]",
"def get_arg_types(header):\n data_types = [param.dataType for param in header.params]\n\n if not data_types:\n return ArgType.Empty\n elif len(set(data_types)) > 1 or all(data_type == SSE.DUAL for data_type in data_types):\n return ArgType.Mixed\n elif all(data_type == SSE.STRING for data_type in data_types):\n return ArgType.String\n elif all(data_type == SSE.NUMERIC for data_type in data_types):\n return ArgType.Numeric\n else:\n return ArgType.Undefined",
"def command_preparation(wrapped, func, **kwargs_c):\n wrapped.result_type = kwargs_c['result_type']\n (args, varargs, keywords, defaults) = inspect.getargspec(func)\n wrapped.__dict__['arguments'] = []\n wrapped.__dict__['arguments_type'] = {}\n wrapped.__dict__['function_name'] = func.__name__\n wrapped.__dict__['choices'] = {}\n for name_arg in filter(lambda x: x in kwargs_c, args):\n wrapped.choices[name_arg] = kwargs_c[name_arg]\n bias = 1 if 'self' in args else 0 # if first arg is self, see from second\n for index, name in enumerate(args[bias:]):\n wrapped.arguments.append((name, defaults[index]))\n wrapped.arguments_type[name] = utils.get_command_argument_type(defaults[index])",
"def dev_args(devnames):\n devc = len(devnames)\n devnames_type = ctypes.c_char_p * devc\n devnames_arg = devnames_type()\n for idx, val in enumerate(devnames):\n devnames_arg[idx] = (val + chr(0)).encode('ascii')\n return ctypes.c_int(devc), ctypes.cast(\n devnames_arg, ctypes.POINTER(ctypes.c_char_p)\n )",
"def _type_def_helper(name, args, env: Env) -> typing.Tuple[Basic, typing.Dict[str, Undecided]]:\n\n new_basic = make_basic(name)\n env.set_named_type(name, new_basic)\n _ty_args = OrderedDict((arg, Undecided()) for arg in args)\n env.undecided_types.update(_ty_args)\n return new_basic, _ty_args",
"def arg_type(self):\n\n arg_type = self.ctype\n\n if 'int' in arg_type:\n arg_type = 'int'\n\n if self.is_list:\n arg_type = 'list of {}'.format(arg_type)\n\n if 'required' in self.qualifiers:\n arg_type = \"{}, optional\".format(arg_type)\n\n return arg_type",
"def parse_arguments(args):",
"def definearguments(self, customparser):\r\n if not customparser:\r\n return\r\n\r\n self.cmdbase.add_login_arguments_group(customparser)\r\n\r\n customparser.add_argument(\r\n \"--fulltypes\",\r\n dest=\"fulltypes\",\r\n action=\"store_true\",\r\n help=\"Optionally include this flag if you would prefer to \"\r\n \"return the full type name instead of the simplified versions\"\r\n \" (Redfish only option).\",\r\n default=None,\r\n )",
"def _RegisterInputs(self):\n args = []\n for source in ['FcA', 'FcB']:\n gps_type = self._gps_type_per_source[source]\n if gps_type == 'Septentrio':\n args += [\n self._Arg('SeptentrioSolution', source, 'pvt_cartesian.x'),\n self._Arg('SeptentrioSolution', source, 'pvt_cartesian.y'),\n self._Arg('SeptentrioSolution', source, 'pvt_cartesian.z'),\n self._Arg('SeptentrioSolution', source, 'pvt_cartesian.mode'),\n self._Arg('SeptentrioSolution', source,\n 'pvt_cartesian.timestamp.tow'),\n ]\n elif gps_type == 'NovAtel':\n args += [\n self._Arg('NovAtelSolution', source, 'best_xyz.pos_x'),\n self._Arg('NovAtelSolution', source, 'best_xyz.pos_y'),\n self._Arg('NovAtelSolution', source, 'best_xyz.pos_z'),\n self._Arg('NovAtelSolution', source, 'best_xyz.pos_type'),\n self._Arg('NovAtelSolution', source, 'best_xyz.timestamp.tow'),\n ]\n else:\n assert False\n return args",
"def fill_args(cls, toolchain, parser):\n pass # pass must be overloaded (if required)",
"def generateArgsList(self, I1, I2, O1, O2, O3, N, M, S, C ):\n ArgsList = [ \n \"-n\", str(N), \n # \"-m\", str(M), \n # \"-s\", str(S), \n \"-c\", str(C), \n ]\n if I1 > 0 or I2 > 0:\n if I1 > 0:\n ArgsList.append(\"-i1\")\n ArgsList.append(str(I1)) \n if I2 > 0:\n ArgsList.append(\"-i2\")\n ArgsList.append(str(I2))\n else: \n ArgsList.append(\"--noinput\")\n \n if O1 > 0 or O2 > 0 or O3 > 0:\n if O1 > 0:\n ArgsList.append(\"-o1\")\n ArgsList.append(str(O1)) \n if O2 > 0:\n ArgsList.append(\"-o2\")\n ArgsList.append(str(O2))\n if O3 > 0:\n ArgsList.append(\"-o3\")\n ArgsList.append(str(O3))\n else: \n ArgsList.append(\"--nooutput\")\n \n ArgsList.append(\"--nosummary\")\n ArgsList.append(\"--verbose\")\n return ArgsList",
"def convert_dial_attrs_args(attrs, args):\n if attrs == None:\n attrs = {}\n attrs_list = [\"%s=%s\" % (k, v) for k, v in attrs.items()]\n if args == None:\n args = []\n c_attrs = list_of_strings_to_c_string_array(list(attrs_list)+[None])\n c_argv = list_of_strings_to_c_string_array(list(args)+[None])\n return c_attrs, c_argv",
"def get_arguments():\n\tparser.add_argument('-i', '--interface', help='interface to affect')\n\tparser.add_argument('-m','--mac', help='mac to allocate')\n\n\targs = parser.parse_args()\n\tinterface = args.interface\n\tmac = args.mac\n\treturn (interface, mac)",
"def get_cmd_args():\n\n\n\t#Creates the Argument Parser\n\tparser = ArgumentParser(description = \"ID Lab qPCR Analysis v\" + VERSION + \" \" + QUALITY)\n\n\t#Adds the input file argument\n\tparser.add_argument('-f', '--file',\n\t\t\t\tnargs = '+',\n\t\t\t\ttype = FileType('r'),\n\t\t\t\trequired = True)\n\n\t#Adds the output directory\n\tparser.add_argument('-o', '--output',\n\t\t\t\trequired = True)\n\n\t#Adds the model argument, to select between the three models\n\tparser.add_argument('-m', '--mod', '--model',\n\t\t\t\tnargs = '?',\n\t\t\t\tchoices = ['relative', 'absolute', 'stability'],\n\t\t\t\trequired = True)\n\n\t#Adds the control genes argument, taking a list of gene names\n\tparser.add_argument('-cg', '--cgenes', '--controlgenes',\n\t\t\t\tnargs = '+',\n\t\t\t\trequired = True)\n\n\t#Adds the optional control sample argument for the stability model, taking a list of sample names\n\tparser.add_argument('-cs', '--csample', '--controlsamples',\n\t\t\t\tnargs = '*')\n\n\t#Adds optional outlier cutoff\n\tparser.add_argument('-oc', '--ocutoff',\n\t\t\t\ttype = float,\n\t\t\t\tdefault = 0.3)\n\n\t#Adds optional max outliers\n\tparser.add_argument('-om', '--omax',\n\t\t\t\ttype = float,\n\t\t\t\tdefault = 0.5)\n\n\t#Adds optional encoding \n\tparser.add_argument('-e', '--encoding',\n\t\t\t\tdefault = 'ISO-8859-1')\n\n\t#Adds optional header size\n\tparser.add_argument('-hd', '--header',\n\t\t\t\tdefault = 47)\n\n\treturn vars(parser.parse_args())",
"def make_args(port, n, t, population, test=None, value=0, failure=None, tx_rate=0, loglevel=logging.INFO, output=None,\n broadcast=True, fan_out=10, profile=None, validate=False, ignore_promoter=False):\n res = [str(port), str(n), str(t), str(population)]\n\n if test is not None:\n res.append('--test')\n res.append(test)\n\n res.append('--value')\n res.append(str(value))\n\n if failure is not None:\n res.append('--failure')\n res.append(failure)\n\n res.append('--tx-rate')\n res.append(str(tx_rate))\n\n if loglevel == logging.DEBUG:\n res.append('--debug')\n elif loglevel == logging.INFO:\n res.append('-v')\n\n # None represents stdout\n if output is not None:\n res.append('-o')\n res.append(output)\n\n if broadcast:\n res.append('--broadcast')\n\n res.append('--fan-out')\n res.append(str(fan_out))\n\n if profile:\n res.append('--profile')\n res.append(profile)\n\n if validate:\n res.append('--validate')\n\n if ignore_promoter:\n res.append('--ignore-promoter')\n\n return res",
"def universal_args(self):\n args = list(self.BASIC_ARGS)\n # Set ATF to be the bios\n args += [\"-bios\", \"%s/bl1.bin\" % self.config.atf]\n\n if self.config.linux:\n args += [\n \"-kernel\",\n \"%s/arch/arm64/boot/Image\" % self.config.linux\n ]\n args += [\"-append\", self.LINUX_ARGS]\n\n if self.config.android:\n args += self.android_drives_args()\n\n return args",
"def create_arg_list(self):\n\n sim = self.sim\n\n py_kernel_args = sim.kernel_args # Python variables that are passed into the kernel\n gen_kernel_args = sim.ctx_info['kernel_arguments'] # A list of needed kernel arguments from kernel autogen (Mako)\n\n list_for_kernel = gen_kernel_args[self.short_name]\n\n python_args_needed = [z[0] for z in list_for_kernel]\n\n self.arg_list = [py_kernel_args[z] for z in python_args_needed]\n\n # Loop over the arg_list...if the argument is a function, call it!\n for i in range(len(self.arg_list)):\n value = self.arg_list[i]\n if inspect.isfunction(value):\n self.arg_list[i] = value()\n\n additional_cl_args = [sim.queue, self.kernel_global_size, self.kernel_local_size]\n\n self.arg_list = additional_cl_args + self.arg_list",
"def get_argdict(cls, toolchain, args):\n return {} # Empty must be overloaded (if required)",
"def build_argv(software: str, receptor: str, ligand: str,\n center: Tuple[float, float, float],\n size: Tuple[int, int, int] = (10, 10, 10),\n ncpu: int = 1, name: Optional[str] = None, path: str = '.',\n extra = Optional[List[str]]) -> Tuple[List[str], str, str]:\n if software not in {'vina', 'smina', 'psovina', 'qvina'}:\n raise ValueError(f'Invalid docking program: \"{software}\"')\n\n path = Path(path)\n if not path.is_dir():\n path.mkdir(parents=True)\n\n name = name or (Path(receptor).stem+'_'+Path(ligand).stem)\n extra = extra or []\n\n out = path / f'{software}_{name}_out.pdbqt'\n log = path / f'{software}_{name}_log.txt'\n \n argv = [\n software, f'--receptor={receptor}', f'--ligand={ligand}',\n f'--center_x={center[0]}',\n f'--center_y={center[1]}',\n f'--center_z={center[2]}',\n f'--size_x={size[0]}', f'--size_y={size[1]}', f'--size_z={size[2]}',\n f'--cpu={ncpu}', f'--out={out}', f'--log={log}', *extra\n ]\n\n return argv, out, log",
"def get_command_line_args(argv):\n # Initialize the arguments to their default values \n\n args = {'startdate': '20200101',\n 'enddate': '20200102',\n 'outfile': 'test.nc',\n 'dt': 5,\n 'real': True,\n 'south': False,\n 'tcv': False,\n 'substorm': False,\n 'ions': False,\n 'move': False,\n 'cusp': False}\n\n arg_type = {'startdate': str,\n 'enddate': str,\n 'outfile': str,\n 'dt': float,\n 'real': bool,\n 'south': bool,\n 'tcv': bool,\n 'substorm': bool,\n 'ions': bool,\n 'move': bool,\n 'cusp': bool}\n \n # If there is input, set default help to False\n args['help'] = False if len(argv) > 0 else True\n \n # Cycle through all arguments except the first, saving input\n for arg in argv:\n # Treat the file list and formatting seperately\n if arg.find('-') == 0:\n # This is not a filename, remove the dash to get the key\n split_arg = arg.split('=')\n akey = split_arg[0][1:]\n # Get the argument value as the desired type\n if akey not in arg_type.keys():\n raise ValueError(''.join(['unknown command line input, ',\n arg, ', try -help for details']))\n\n if len(split_arg) == 1:\n if arg_type[akey] == bool:\n arg_val = True\n else:\n raise ValueError('expected equality after flag {:}'.format(\n akey))\n else:\n if arg_type[akey] == int:\n arg_val = int(split_arg[1])\n elif arg_type[akey] == float:\n arg_val = float(split_arg[1])\n elif arg_type[akey] == str:\n arg_val = split_arg[1]\n else:\n # This is boolean input\n arg_val = bool_string(split_arg[1])\n\n args[akey] = arg_val\n \n return args",
"def get_arg(instruction, itype):\n\n if itype == itypes.family_code:\n return instruction[7:2]\n elif itype == itypes.opcode:\n return instruction[7:]\n elif itype == itypes.funct3:\n return instruction[15:12]\n elif itype == itypes.funct7:\n return instruction[32:25]\n elif itype == itypes.rs1:\n return instruction[20:15]\n elif itype == itypes.rs2:\n return instruction[25:20]\n elif itype == itypes.imm12lo:\n return concat(instruction[32], instruction[7], instruction[31:27])\n elif itype == itypes.imm12hi:\n return concat(instruction[27:25], instruction[12:8])\n elif itype == itypes.instruction_id:\n return instruction[15:12]\n elif itype == itypes.rd:\n return instruction[12:7]\n elif itype == itypes.imm12:\n return instruction[32:20]\n elif itype == itypes.imm12_sb:\n return concat(instruction[32:25], instruction[12:7])\n elif itype == itypes.imm20:\n return concat(instruction[31], instruction[20:12], instruction[20], instruction[31:21])\n elif itype == itypes.imm20_pc:\n return instruction[31:12]\n elif itype == itypes.shamtw:\n return instruction[25:20]\n elif itype == itypes.shamt:\n return instruction[25:20]\n else:\n return None",
"def get_init_arguments_and_types(cls) -> List[Tuple[str, Tuple, Any]]:\n trainer_default_params = inspect.signature(cls).parameters\n name_type_default = []\n for arg in trainer_default_params:\n arg_type = trainer_default_params[arg].annotation\n arg_default = trainer_default_params[arg].default\n try:\n arg_types = tuple(arg_type.__args__)\n except AttributeError:\n arg_types = (arg_type,)\n\n name_type_default.append((arg, arg_types, arg_default))\n\n return name_type_default",
"def convert_arg((arg, attrs, mode, typ, name)):\n iorname = name\n return iorname, (arg, attrs, mode, typ, name)"
] | [
"0.6376518",
"0.58763367",
"0.58502054",
"0.5712465",
"0.5642069",
"0.5633545",
"0.56088585",
"0.5531215",
"0.5515016",
"0.550904",
"0.5504691",
"0.5500384",
"0.54895383",
"0.5458106",
"0.5442237",
"0.53984636",
"0.5386691",
"0.5374875",
"0.53544724",
"0.5347694",
"0.5341656",
"0.5330729",
"0.53167206",
"0.5298466",
"0.5288069",
"0.52814376",
"0.52719194",
"0.526808",
"0.52661186",
"0.5257266"
] | 0.6243665 | 1 |
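The record above pairs the query with pylibxc's ctypes argtype builder. A hedged usage sketch follows; the type aliases (`_xc_func_p`, `_ndptr`, `_ndptr_w`) are assumptions standing in for the library's real declarations, which are not shown in this dump:

```python
import ctypes
import numpy as np

# Assumed stand-ins for the library's ctypes declarations (not from the dump).
_xc_func_p = ctypes.c_void_p  # opaque xc_func_type* handle
_ndptr = np.ctypeslib.ndpointer(dtype=np.double, flags=("C", "A"))         # read-only input array
_ndptr_w = np.ctypeslib.ndpointer(dtype=np.double, flags=("W", "C", "A"))  # writable output array

def build_compute_argtype(num_nd, num_nd_write):
    # Mirrors the helper in the record: handle, npoints, then the arrays.
    ret = [_xc_func_p, ctypes.c_size_t]
    ret += [_ndptr] * num_nd
    ret += [_ndptr_w] * num_nd_write
    return tuple(ret)

# e.g. a compute function taking one input array and writing one output array:
# lib.xc_lda_exc.argtypes = build_compute_argtype(1, 1)
```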
Returns the LibXCFunctional family. | def get_family(self):
return self._family | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_01_GetFamily(self):\n self.m_device_obj.DeviceFamily = TESTING_FAMILY_NAME_1\n l_family = FamUtil.get_family(self.m_device_obj)\n # print(PrettyFormatAny.form(l_family, 'B3-01-A - Family'))\n self.assertEqual(l_family, TESTING_FAMILY_NAME_1)",
"def test_02_GetFamily(self):\n self.m_device_obj.DeviceFamily = TESTING_FAMILY_NAME_2\n l_family = FamUtil.get_family(self.m_device_obj)\n # print(PrettyFormatAny.form(l_family, 'B3-02-A - Family'))\n self.assertEqual(l_family, TESTING_FAMILY_NAME_2)",
"def device_family(self):\n return self._dll.JLINKARM_GetDeviceFamily()",
"def family(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"family\")",
"def family(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"family\")",
"def family(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"family\")",
"def get_family(self):\n # Implemented from template for osid.resource.ResourceLookupSession.get_bin\n return self._catalog",
"def get_family(self):\n # Implemented from template for osid.resource.ResourceLookupSession.get_bin\n return self._catalog",
"def get_family(self):\n # Implemented from template for osid.resource.ResourceLookupSession.get_bin\n return self._catalog",
"def family(self) -> Optional[str]:\n return pulumi.get(self, \"family\")",
"def family(self) -> Optional[str]:\n return pulumi.get(self, \"family\")",
"def family(self) -> Optional[str]:\n return pulumi.get(self, \"family\")",
"def read_device_family(self):\n family = ctypes.c_int()\n\n result = self._lib.NRFJPROG_read_device_family(ctypes.byref(family))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return DeviceFamily(family.value).name",
"def family(self):",
"def get_family_name(self):\n return self.family_name",
"def GetFamily(*args, **kwargs):\n return _gdi_.Font_GetFamily(*args, **kwargs)",
"def model_family(self) -> str:\n return self._model_family",
"def af(self):\n return self.opts_family",
"def get_family(self, family_id):\n return self.__make_api_call('get/family/{}'.format(family_id))",
"def GetFamilyString(*args, **kwargs):\n return _gdi_.Font_GetFamilyString(*args, **kwargs)",
"def _check_family(self):\n return",
"def getFamilyName(self):\n return _libsbml.ModelCreator_getFamilyName(self)",
"def test_02_GetFamilyObj1(self):\n self.m_device_obj.DeviceFamily = TESTING_FAMILY_NAME_1\n l_obj = FamUtil._get_family_obj(self.m_pyhouse_obj, self.m_device_obj)\n # print(PrettyFormatAny.form(l_obj, 'B2-02-A - Family'))\n self.assertEqual(l_obj.Name, TESTING_FAMILY_NAME_1)\n self.assertEqual(l_obj.Active, True)\n self.assertEqual(l_obj.Key, 1)\n self.assertEqual(l_obj.FamilyDevice_ModuleName, 'Insteon_device')\n self.assertEqual(l_obj.FamilyPackageName, 'Modules.Families.Insteon')\n self.assertEqual(l_obj.FamilyXml_ModuleName, 'Insteon_xml')",
"def family(self):\n return self.sock.family",
"def find_family(self, needle):\n return self.__make_api_call('find/family/{}'.format(needle))",
"def navigation_type(self):\n return 'Family'",
"def test_01_Family(self):\n l_xml = self.m_xml.light_sect[0]\n print(PrettyFormatAny.form(l_xml, 'C3-01-A - XML'))\n l_device = self.m_device_obj\n l_light = FamUtil.read_family_data(self.m_pyhouse_obj, l_device, l_xml)\n print(PrettyFormatAny.form(l_light, 'C3-01-B - Light'))\n self.assertEqual(l_device.Name, TESTING_LIGHT_NAME_0)\n self.assertEqual(l_light.InsteonAddress, convert.dotted_hex2int(TESTING_INSTEON_ADDRESS_0))",
"def test_04_GetFamilyObj3(self):\n self.m_device_obj.DeviceFamily = TESTING_FAMILY_NAME_3\n l_obj = FamUtil._get_family_obj(self.m_pyhouse_obj, self.m_device_obj)\n # print(PrettyFormatAny.form(l_obj, 'B2-04-A - Family'))\n self.assertEqual(l_obj.Name, TESTING_FAMILY_NAME_3)\n self.assertEqual(l_obj.Active, True)\n self.assertEqual(l_obj.Key, 3)\n self.assertEqual(l_obj.FamilyDevice_ModuleName, 'X10_device')\n self.assertEqual(l_obj.FamilyPackageName, 'Modules.Families.X10')\n self.assertEqual(l_obj.FamilyXml_ModuleName, 'X10_xml')",
"def get_nh_family(self):\n return int(self.get('nhr_family'))",
"def family_name(self):\n return FAMILY_NAME"
] | [
"0.6671878",
"0.66315264",
"0.6380709",
"0.6380162",
"0.62706983",
"0.62706983",
"0.62412816",
"0.62412816",
"0.62412816",
"0.6231695",
"0.6231695",
"0.6231695",
"0.6102895",
"0.6017814",
"0.5974126",
"0.5966167",
"0.58548194",
"0.57114094",
"0.5633405",
"0.5601125",
"0.5577917",
"0.55569565",
"0.552615",
"0.54889655",
"0.5476718",
"0.5444216",
"0.5440335",
"0.54299563",
"0.5403442",
"0.5388547"
] | 0.68424135 | 0 |
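For context on the family getter in this record: LibXC reports the functional family as an integer flag. The sketch below decodes it; the constant values are taken from libxc's `xc.h` and should be treated as assumptions to verify against your libxc version:

```python
# Assumed libxc family flags (from xc.h; verify for your libxc version).
FAMILY_NAMES = {
    1: "LDA",        # XC_FAMILY_LDA
    2: "GGA",        # XC_FAMILY_GGA
    4: "MGGA",       # XC_FAMILY_MGGA
    32: "HYB_GGA",   # XC_FAMILY_HYB_GGA
    64: "HYB_MGGA",  # XC_FAMILY_HYB_MGGA
}

def family_name(func):
    # Works with any object exposing get_family() like the record above.
    return FAMILY_NAMES.get(func.get_family(), "UNKNOWN")
```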
Returns the VV10 (b, C) coefficients | def get_vv10_coef(self):
if self._nlc_b is False:
raise ValueError("get_vv10_coeff can only be called on -V functionals.")
return (self._nlc_b, self._nlc_C) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def coefficients(self):\r\n return self.coef_['x']",
"def coefficients(self) :\n raise NotImplementedError",
"def b_coefficients(x1,x2,x3,y1,y2,y3,CCoefficients,DCoefficients):\n\tBCoefficients = np.array([\t((y2-y1)/(x2-x1)-CCoefficients[0]*(x2-x1) - DCoefficients[0]*((x2-x1)**2)), \\\n\t\t\t\t\t\t\t\t((y3-y2)/(x3-x2)-CCoefficients[1]*(x3-x2) - DCoefficients[1]*((x3-x2)**2)) \t]).astype(float)\n\treturn(BCoefficients)",
"def b10(self,k1,k2,c):\n return 2.0/3.0*(2.0+self.mu)*c.pkInterp(k1)*c.pkInterp(k2)",
"def coefficients(self) :\n return self.__coefficients",
"def test_coefficients(self):\n\n coefs = self.cs.coefficients\n\n self.assertEqual(coefs, (1, 0, 1, 0, 0, -1))",
"def coefficients(self):\n return self._coefficients",
"def coefficients(self):\n return self._coefficients",
"def c_coefficients(x1,x2,x3,y1,y2,y3,initial_slope,final_slope):\n\tC = c_matrix(x1,x2,x3)\n\ty = y_vector(x1,x2,x3,y1,y2,y3,initial_slope,final_slope)\n\tCCoefficients = np.dot(inv(C),y)\n\treturn(CCoefficients)",
"def coefficients(k, xi, x):\n\n import pyweno.cnonuniform\n\n x = np.asarray(x, np.float64)\n xi = np.asarray(xi, np.float64)\n\n nc = len(x) - 1\n n = len(xi)\n c = np.zeros((nc, n, k, k), np.float64)\n beta = np.zeros((nc, k, k, k), np.float64)\n varpi = np.zeros((nc, n, k), np.float64)\n\n pyweno.cnonuniform.nonuniform_coeffs(k, xi, x, c, beta, varpi)\n\n return c, beta, varpi",
"def get_coefficients(self):\n return self.coefficients",
"def get_coefficients(self):\n return self.coefficients",
"def coefficients(self):\n if self._coefficients is None:\n return np.hstack([c.coefficients for c in self._traces])\n return self._coefficients",
"def get_base_coefs(mv):\n\trs = []\n\tfor bs in bases:\n\t\tt = []\n\t\tfor b in bs:\n\t\t\tt.append(mv.coef(b))\n\t\t\t\t\t\n\t\trs.append(t)\t\t\n\treturn rs",
"def langevin_coefficients(\n temperature,\n dt,\n friction,\n masses):\n vscale = np.exp(-dt*friction)\n if friction == 0:\n fscale = dt\n else:\n fscale = (1-vscale)/friction\n kT = BOLTZ * temperature\n nscale = np.sqrt(kT*(1-vscale*vscale)) # noise scale\n invMasses = 1.0/masses\n sqrtInvMasses = np.sqrt(invMasses)\n\n ca = vscale\n cb = fscale*invMasses\n cc = nscale*sqrtInvMasses\n return ca, cb, cc",
"def coefficients(dataset):\r\n x = [row[0] for row in dataset]\r\n y = [row[1] for row in dataset]\r\n x_mean, y_mean = mean(x), mean(y)\r\n b1 = covariance(x, x_mean, y, y_mean) / variance(x, x_mean)\r\n b0 = y_mean - b1 * x_mean\r\n return [b0, b1]",
"def coefficients(self) -> np.ndarray:\n return self._coefficients",
"def circuit(V, I0, L, C, alpha, beta):\n Vdot = [V[0], V[0]/(psi(np.pi(/2))) # first and second derivative of V\n return Vdot[1] - (1/C) * (alpha - 3*gamma*V[0]**2)*Vdot[0] + 1/(L*C)*V[0]",
"def coef_val():\n\n basepath = path.join(path.dirname(path.realpath('__file__')), 'data')\n fdata = basepath + path.sep + 'VAWTPolySurfaceCoef_pub.csv' # published coefficients from paper\n # fdata = basepath + path.sep + 'VAWTPolySurfaceCoef.csv' # polynomial surface fitting coefficients\n\n loc1 = np.zeros(10)\n loc2 = np.zeros(10)\n loc3 = np.zeros(10)\n spr1 = np.zeros(10)\n spr2 = np.zeros(10)\n skw1 = np.zeros(10)\n skw2 = np.zeros(10)\n scl1 = np.zeros(10)\n scl2 = np.zeros(10)\n scl3 = np.zeros(10)\n\n f = open(fdata)\n csv_f = csv.reader(f)\n\n i = 0\n for row in csv_f:\n if i != 0:\n loc1[i-1] = float(row[0])\n loc2[i-1] = float(row[1])\n loc3[i-1] = float(row[2])\n spr1[i-1] = float(row[3])\n spr2[i-1] = float(row[4])\n skw1[i-1] = float(row[5])\n skw2[i-1] = float(row[6])\n scl1[i-1] = float(row[7])\n scl2[i-1] = float(row[8])\n scl3[i-1] = float(row[9])\n i += 1\n\n f.close()\n\n return loc1,loc2,loc3,spr1,spr2,skw1,skw2,scl1,scl2,scl3",
"def b11(self,k1,k2,c):\n return (k1/k2+k2/k1)*c.pkInterp(k1)*c.pkInterp(k2)",
"def coeff(self):\n return self._coeff",
"def coefficients(self, force_characters = False) :\n raise NotImplementedError",
"def d_coefficients(x1,x2,x3,CCoefficients):\n\tDCoefficients = np.array([\t(CCoefficients[1]-CCoefficients[0])/(3*(x2-x1)), \\\n\t\t\t\t\t\t\t\t(CCoefficients[2]-CCoefficients[1])/(3*(x3-x2))\t], \\\n\t\t\t\t\t\t\t\tfloat)\n\treturn(DCoefficients)",
"def coefC(x0,y0,x1,y1):\n return (x1*y0-x0*y1)/(x1-x0)",
"def find_coefficients(self):\n self.make_matrix()\n self.coeffs = np.linalg.solve(self.global_matrix,self.global_vector)\n self.coeffs = np.append(self.coeffs, self.D) #Initial condition",
"def mvector(B, c):\n # for Sun Mg Potential: c=1.6281689374348\n A = np.zeros(shape=4)\n A[0] = (2 / 3) * B[0]\n A[1] = 0.5 * ((2 / sqrt(3)) * B[1] - A[0])\n A[2] = -A[0] - A[1]\n A[3] = B[2] / c\n return A",
"def _coef(ctx, J, eps):\n\n newJ = J+2 # compute more coefficients that are needed\n neweps6 = eps/2. # compute with a slight more precision that are needed\n\n # PREPARATION FOR THE COMPUTATION OF V(N) AND W(N)\n # See II Section 3.16\n #\n # Computing the exponent wpvw of the error II equation (81)\n wpvw = max(ctx.mag(10*(newJ+3)), 4*newJ+5-ctx.mag(neweps6))\n\n # Preparation of Euler numbers (we need until the 2*RS_NEWJ)\n E = ctx._eulernum(2*newJ)\n\n # Now we have in the cache all the needed Euler numbers.\n #\n # Computing the powers of pi\n #\n # We need to compute the powers pi**n for 1<= n <= 2*J\n # with relative error less than 2**(-wpvw)\n # it is easy to show that this is obtained\n # taking wppi as the least d with\n # 2**d>40*J and 2**d> 4.24 *newJ + 2**wpvw\n # In II Section 3.9 we need also that\n # wppi > wptcoef[0], and that the powers\n # here computed 0<= k <= 2*newJ are more\n # than those needed there that are 2*L-2.\n # so we need J >= L this will be checked\n # before computing tcoef[]\n wppi = max(ctx.mag(40*newJ), ctx.mag(newJ)+3 +wpvw)\n ctx.prec = wppi\n pipower = {}\n pipower[0] = ctx.one\n pipower[1] = ctx.pi\n for n in range(2,2*newJ+1):\n pipower[n] = pipower[n-1]*ctx.pi\n\n # COMPUTING THE COEFFICIENTS v(n) AND w(n)\n # see II equation (61) and equations (81) and (82)\n ctx.prec = wpvw+2\n v={}\n w={}\n for n in range(0,newJ+1):\n va = (-1)**n * ctx._eulernum(2*n)\n va = ctx.mpf(va)/ctx.fac(2*n)\n v[n]=va*pipower[2*n]\n for n in range(0,2*newJ+1):\n wa = ctx.one/ctx.fac(n)\n wa=wa/(2**n)\n w[n]=wa*pipower[n]\n\n # COMPUTATION OF THE CONVOLUTIONS RS_P1 AND RS_P2\n # See II Section 3.16\n ctx.prec = 15\n wpp1a = 9 - ctx.mag(neweps6)\n P1 = {}\n for n in range(0,newJ+1):\n ctx.prec = 15\n wpp1 = max(ctx.mag(10*(n+4)),4*n+wpp1a)\n ctx.prec = wpp1\n sump = 0\n for k in range(0,n+1):\n sump += ((-1)**k) * v[k]*w[2*n-2*k]\n P1[n]=((-1)**(n+1))*ctx.j*sump\n P2={}\n for n in range(0,newJ+1):\n ctx.prec = 15\n wpp2 = max(ctx.mag(10*(n+4)),4*n+wpp1a)\n ctx.prec = wpp2\n sump = 0\n for k in range(0,n+1):\n sump += (ctx.j**(n-k)) * v[k]*w[n-k]\n P2[n]=sump\n # COMPUTING THE COEFFICIENTS c[2n]\n # See II Section 3.14\n ctx.prec = 15\n wpc0 = 5 - ctx.mag(neweps6)\n wpc = max(6,4*newJ+wpc0)\n ctx.prec = wpc\n mu = ctx.sqrt(ctx.mpf('2'))/2\n nu = ctx.expjpi(3./8)/2\n c={}\n for n in range(0,newJ):\n ctx.prec = 15\n wpc = max(6,4*n+wpc0)\n ctx.prec = wpc\n c[2*n] = mu*P1[n]+nu*P2[n]\n for n in range(1,2*newJ,2):\n c[n] = 0\n return [newJ, neweps6, c, pipower]",
"def cost_b_v(self):\n return self._cost_b_v",
"def coefficient(self) -> float:\n ...",
"def c(self) -> np.ndarray:\n return self._vector[10:12]"
] | [
"0.65958256",
"0.65501094",
"0.6504454",
"0.6486117",
"0.6194975",
"0.61420995",
"0.60554177",
"0.60554177",
"0.60174334",
"0.5979773",
"0.59653145",
"0.59653145",
"0.59482545",
"0.59466475",
"0.5945674",
"0.59176135",
"0.59019333",
"0.58690476",
"0.58312464",
"0.582763",
"0.57844913",
"0.571731",
"0.5712013",
"0.5661301",
"0.5642825",
"0.5633623",
"0.56261474",
"0.5612051",
"0.5610591",
"0.55930054"
] | 0.83470845 | 0 |
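Because `get_vv10_coef` raises for functionals that carry no VV10 parameters, callers should guard the lookup. A hedged usage sketch follows; the pylibxc import, constructor signature, and functional name are assumptions for illustration and are not part of this dump:

```python
import pylibxc  # assumed pylibxc-style bindings

func = pylibxc.LibXCFunctional("gga_xc_vv10", "unpolarized")
try:
    b, C = func.get_vv10_coef()
    print("VV10 coefficients: b={}, C={}".format(b, C))
except ValueError:
    print("functional does not carry VV10 (b, C) parameters")
```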
Gets the names of all external parameters | def get_ext_param_names(self):
num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)
ret = []
for p in range(num_param):
tmp = core.xc_func_info_get_ext_params_name(self.xc_func_info, p)
ret.append(tmp.decode("UTF-8"))
return ret | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parameter_names(self) -> List[str]:",
"def get_param_names(self):\n return list(self.params.keys())",
"def parameter_names(self) -> list:\n parameters = []\n parameters.extend(self.properties.parameter_names)\n return parameters",
"def parameters_names(cls):\n return cls._Parameters._fields",
"def _get_param_names(self):\r\n return sorted([p\r\n for p in self.__dict__\r\n if p != 'additional_args'])",
"def get_ext_param_descriptions(self):\n num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)\n\n ret = []\n for p in range(num_param):\n tmp = core.xc_func_info_get_ext_params_description(self.xc_func_info, p)\n ret.append(tmp.decode(\"UTF-8\"))\n\n return ret",
"def get_param_names(hf):\n parameters = get_params(hf)\n return [p.name for p in parameters]",
"def parameterNames(self, p_int): # real signature unknown; restored from __doc__\n return []",
"def _external_params():\n list_ext_params = []\n list_ext_params.append(\n (hoomd.md.external.field.Periodic, \"params\",\n list([dict(A=1.5, i=1, w=3.5, p=5),\n dict(A=10, i=0, w=3.4, p=2)]), _evaluate_periodic))\n list_ext_params.append(\n (hoomd.md.external.field.Electric, \"E\", list([\n (1, 0, 0),\n (0, 2, 0),\n ]), _evaluate_electric))\n return list_ext_params",
"def get_parameter_names(self):\n parNames = []\n # for par in self.variables: # TODO: LIKELY A BUG! DOES THE SAME AS get_variable_names()\n for par in self.parameters: # TRYING TO SOLVE THE ISSUE\n # EstimationVariable\n parNames.append(par.name)\n return parNames",
"def parameters(self):\n return [p for _, a in vars(self).items() for p in self._params(a)]",
"def get_hyperparameter_names():\n params = ['mu', 'nu', 'r', 's']\n return params",
"def parameters(self):\n return []",
"def _get_fitted_param_names(self):\n return self._fitted_param_names",
"def get_paramnames_list(self):\n # TODO include syselem?\n\n query = \"SELECT NAME FROM %s\" % self.__schema\n with self.__connection.cursor() as cursor:\n cursor.execute(query)\n result = cursor.fetchall()\n return [val['NAME'] for val in result]",
"def get_params_list():\n return common.QOL_PARAMS",
"def get_layer_var_names(self):\n return(self.params)",
"def _get_parameters(self) -> list:\n return self.parameters",
"def get_params(self):\n return []",
"def get_str_param_names(self):\n # Exclude self.api and self.names from the command string\n return self.get_attribute_names(FormattedParameter)",
"def get_parameters_list(self):\n return self.description[\"config\"][\"values\"].keys()",
"def get_mandatory_param_names(self):\n all_names = self.params.keys()\n return [name for name in all_names \n if not self.params[name].is_optional]",
"def parameter_names(self):\n return [x for x in self.transformations.values() if isinstance(x, str)]",
"def variables_used (self) :\r\n\t\treturn [i[0] for i in self.parameters]",
"def keys(self):\n return self.params.keys()",
"def get_resource_params():\n return Parameter.list()",
"def parameters_list(self):\n return [getattr(self.parameters, p) for p in self.parameters_names()]",
"def get_required_parameters(self) -> list:\n results = []\n if self.no_params or self.params_optional:\n return []\n else:\n for parameter, parameter_details in self.parameters.items():\n # Fixing issue #92\n # if parameter == \"effect\":\n # continue\n if not parameter_details.default_value:\n results.append(parameter_details.name)\n return results",
"def param_names(\n self, *, include_tp: bool = False, include_gq: bool = False\n ) -> List[str]:\n return (\n self._param_names(self.model, int(include_tp), int(include_gq))\n .decode(\"utf-8\")\n .split(\",\")\n )",
"def getParameters(self): #$NON-NLS-1$\r"
] | [
"0.77504754",
"0.7389947",
"0.72158164",
"0.71607",
"0.71043503",
"0.7083737",
"0.7037365",
"0.70315456",
"0.7013595",
"0.70036983",
"0.698914",
"0.687916",
"0.6841593",
"0.6833137",
"0.68269855",
"0.6797805",
"0.6785064",
"0.6723883",
"0.6707179",
"0.6698673",
"0.6685417",
"0.6665723",
"0.66453034",
"0.66269267",
"0.6609356",
"0.6596573",
"0.658621",
"0.65673566",
"0.6565315",
"0.65632"
] | 0.7800671 | 0 |
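For orientation, the positive document in the row above follows the standard libxc ctypes pattern: ask the C side for a parameter count, then fetch and decode each UTF-8 name. A minimal runnable sketch with the C layer stubbed out (the `_StubCore` class and its `_beta`/`_gamma` names are illustrative stand-ins, not part of the real binding):

```python
# Stand-in for the ctypes-backed `core` module used by the positive document.
class _StubCore:
    _names = [b"_beta", b"_gamma"]

    @staticmethod
    def xc_func_info_get_n_ext_params(info):
        return len(_StubCore._names)

    @staticmethod
    def xc_func_info_get_ext_params_name(info, p):
        return _StubCore._names[p]  # C strings come back as bytes

core = _StubCore()

def get_ext_param_names(xc_func_info):
    """Gets the names of all external parameters."""
    num_param = core.xc_func_info_get_n_ext_params(xc_func_info)
    return [
        core.xc_func_info_get_ext_params_name(xc_func_info, p).decode("UTF-8")
        for p in range(num_param)
    ]

print(get_ext_param_names(None))  # ['_beta', '_gamma']
```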
Gets the descriptions of all external parameters | def get_ext_param_descriptions(self):
num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)
ret = []
for p in range(num_param):
tmp = core.xc_func_info_get_ext_params_description(self.xc_func_info, p)
ret.append(tmp.decode("UTF-8"))
return ret | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _external_params():\n list_ext_params = []\n list_ext_params.append(\n (hoomd.md.external.field.Periodic, \"params\",\n list([dict(A=1.5, i=1, w=3.5, p=5),\n dict(A=10, i=0, w=3.4, p=2)]), _evaluate_periodic))\n list_ext_params.append(\n (hoomd.md.external.field.Electric, \"E\", list([\n (1, 0, 0),\n (0, 2, 0),\n ]), _evaluate_electric))\n return list_ext_params",
"def getParameters(self): #$NON-NLS-1$\r",
"def parameters(self):\n return []",
"def print_all_params(self, disp=True):\n descriptions = {'general': {}}\n for name, param in self.params.items():\n descriptions['general'][name] = param.get_description()\n\n for comp, comp_obj in self.components.items():\n descriptions[comp] = {}\n for name in comp_obj.get_params():\n descriptions[comp][name] = comp_obj.get_param_description(name)\n return self._print_params(descriptions, disp)",
"def get_resource_params():\n return Parameter.list()",
"def print_params():\n\n help_out = convert_phil_to_text(master_phil, att_level=1)\n txt_out = convert_phil_to_text(master_phil)\n\n return help_out, txt_out",
"def get_parameters_list(self):\n return self.description[\"config\"][\"values\"].keys()",
"def paramDetails(cls):\n return {\n 'dim': (10, 20, 2, 20),\n 'nIter': (1, 10, 2, 5),\n 'lamb': (.1, 1., .1, .05),\n 'alph': (30, 50, 5, 40)\n }",
"def display_parameters(self):\n\n self.logging.debug(\"============\")\n for attr in self.parm_list:\n self.logging.debug(attr.label + \" (\" + attr.when + \")\" + \" = \" + str(attr.value))\n self.logging.debug(\"============\")",
"def get_ext_param_names(self):\n num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)\n\n ret = []\n for p in range(num_param):\n tmp = core.xc_func_info_get_ext_params_name(self.xc_func_info, p)\n ret.append(tmp.decode(\"UTF-8\"))\n\n return ret",
"def get_parameter_descriptions(parameters):\n\n lines = []\n opt_lines = []\n for param in parameters:\n param_name = check_param(flatten_param(param['name']))\n if param['required']:\n required = 'required'\n lines.append(':param {0}: ({1}) {2}'.format(param_name, required,\n param['description']))\n lines.append(':type {0}: {1}'.format(param_name, param['type']))\n else:\n required = 'optional'\n opt_lines.append(':param {0}: ({1}) {2}'.format(param_name,\n required, param['description']))\n opt_lines.append(':type {0}: {1} or None'.format(param_name,\n param['type']))\n\n return lines + opt_lines",
"def help(cls):\n print(cls._LIST_PARAMETERS)",
"def print_params(self):\n s = self._list_params()+\"\\n\"\n if 'scale_params' in self.__dict__.keys():\n s += self.scale_params._list_params()+\"\\n\"\n if 'atmospheric_params' in self.__dict__.keys():\n if self.atmospheric_params is not None:\n s += self.atmospheric_params._list_params()+\"\\n\"\n\n if 'atemperature_params' in self.__dict__.keys():\n if self.atemperature_params is not None:\n s += self.atemperature_params._list_params()+\"\\n\"\n\n if 'oceanic_params' in self.__dict__.keys():\n if self.oceanic_params is not None:\n s += self.oceanic_params._list_params()+\"\\n\"\n\n if 'ground_params' in self.__dict__.keys():\n if self.ground_params is not None:\n s += self.ground_params._list_params()+\"\\n\"\n\n if 'gotemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n s += self.gotemperature_params._list_params() + \"\\n\"\n\n print(\"Qgs v0.2.8 parameters summary\")\n print(\"=============================\\n\")\n print(s)",
"def get_params(self):",
"def parameter_names(self) -> List[str]:",
"def parameters(self):\n pass",
"def get_resource_params(self):\n return Parameter.list()",
"def get_resource_params(self):\n return Parameter.list()",
"def _get_parameters(self) -> list:\n return self.parameters",
"def get_param_texts(self):\n return self.param_texts",
"def gather_experiment_parameters(self):\n consts = win32com.client.constants.__dicts__[0]\n exp_params = [r for r in consts.keys() if len(r.split(\"EXP_\")) > 1]\n dm_params = [r for r in consts.keys() if len(r.split(\"DM_\")) > 1]\n self.app_param = {} \n self.appdoc_param = {} \n for p in exp_params:\n self.app_param.update({p:self.app.GetParam(consts[p])})\n\n for p in dm_params:\n #self.appdoc_param.update({p:self.app.GetParam(consts[p])}) bug? call appdoc? CP\n\n self.appdoc_param.update({p:self.app.GetParam(consts[p])})",
"def get_params(self):\n pass",
"def params_desc(self):\n return \"{}/{}/{}/{}\".format(\n self.learning_rate, self.movement, self.milestones, self.gamma\n )",
"def get_params_list():\n return common.QOL_PARAMS",
"def display_parameters(self):\n l = []\n for param in self.parameters.all():\n if len(param.value) > 16:\n l.append(u\"{}={}...\".format(param.name, param.value[:16]))\n else:\n l.append(u\"{}={}\".format(param.name, param.value))\n return \"; \".join(l)",
"def print_params(self):\n print(self._list_params())",
"def parameters(self):\n return [term.parameter for term in self.terms]",
"def parameters(self):",
"def get_params(self):\n return []",
"def param_strs(self):\n name_len = max(len(p.name) for p in self)\n value_len = max(len(p.value_str) for p in self.params.values())\n units_len = max(len(p.units) for p in self.params.values())\n return [(p.name.ljust(name_len), p.value_str.ljust(value_len),\n p.units.ljust(units_len), p.__doc__)\n for p in self.params.values() if p]"
] | [
"0.7344136",
"0.66554403",
"0.6622951",
"0.6612917",
"0.6590222",
"0.65895045",
"0.65495247",
"0.6545839",
"0.64892864",
"0.6436556",
"0.6409071",
"0.6368646",
"0.6351103",
"0.6338677",
"0.6323158",
"0.63069546",
"0.628588",
"0.628588",
"0.6284201",
"0.6275635",
"0.62606674",
"0.62537086",
"0.6227173",
"0.6226887",
"0.62034166",
"0.6192739",
"0.6192115",
"0.61768305",
"0.61747104",
"0.6159948"
] | 0.7465921 | 0 |
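The descriptions accessor in this row mirrors the names accessor line for line; only the underlying C call changes, which is why the two rows share so many negatives. A hedged sketch of combining both accessors into one mapping (assumes an object exposing both wrapper methods from these rows):

```python
def describe_ext_params(func):
    # Names and descriptions are driven by the same C-side parameter
    # count, so zip() pairs them up without extra length checks.
    return dict(zip(func.get_ext_param_names(),
                    func.get_ext_param_descriptions()))
```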
Gets the default values of all external parameters. | def get_ext_param_default_values(self):
num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)
ret = []
for p in range(num_param):
tmp = core.xc_func_info_get_ext_params_default_value(self.xc_func_info, p)
ret.append(tmp)
return ret | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parameters_default(cls):\n return cls._Parameters.__new__.__defaults__",
"def default_parameters(self) -> List[Parameter]:\n return self.settings.job_default_parameters",
"def initDefaults(self):\n return _libsbml.Parameter_initDefaults(self)",
"def parameters(self):\n return self._default_params",
"def get_parameters(self):\n params = {}\n for p in self.DEFAULT_VALUES.keys():\n params[p] = getattr(self, p)\n return params",
"def get_default_parameters(self, default_type):\n return self._default_parameters.get(default_type, {})",
"def getDefaultParameterValues(self):\r\n dct = {}\r\n self.initializeRoadRunnerModel()\r\n self.roadrunnerModel.reset()\r\n for parameterName in self.parametersToFit:\r\n dct[parameterName] = self.roadrunnerModel.model[parameterName]\r\n return dct",
"def get_defaults(self):\n\t\treturn self.__defaults",
"def _resolve_defaults(self, **kwargs):\n res = list()\n for name, value in kwargs.items():\n if value is None:\n value = self.default(name)\n if value is None:\n raise RuntimeError(f\"Missing default {name}\")\n res.append(value)\n return res",
"def _get_default_import_values(self, cr, uid, external_session, mapping_id=None, defaults=None, context=None):\n return defaults",
"def _default_parameters():\n\n return {\n 'opt': 'adadelta',\n 'activation_function': 'softmax',\n 'lr': 0.0001,\n 'decay': 1e-6,\n 'loss': 'categorical_crossentropy',\n 'batch_size': 32,\n 'nb_epoch': 20,\n 'shuffle': True,\n 'momentum': 0.9,\n 'nesterov': True,\n 'rho': 0.95,\n 'epsilon': 1e-08,\n 'beta_1': 0.9,\n 'beta_2': 0.999,\n 'horizontal_flip': False,\n 'im_size': 240,#256,\n 'dense_layer': 1024,\n 'nb_classes': 10,\n 'nb_channels': 3,\n 'dropout': 0.5,\n 'metrics': ['accuracy'],\n 'volume': None,\n 'input_size': 25,\n 'temporal': False,\n 'input_dim': 512,\n 'nb_frames': 60,\n 'stride': 16,\n 'nb_hidden':512,\n 'lstm': False\n\n }",
"def get_defaultvalues(host):\n return get_obj_defaultvalues(OBJT_HOST, host)",
"def _get_default_parameters(new_values):\n no_default = [\"BEAM\", \"TYPE\", \"ERRORDEF\", \"CORRECTIONS\"]\n\n not_found = [nf for nf in no_default if nf not in new_values]\n if any(not_found):\n raise ValueError(\"Required parameters '{}' not found.\".format(not_found))\n\n # Some defaults\n default = {\n # Beam Parameters\n \"QX\": \"62.31\",\n \"QY\": \"60.32\",\n \"CHROMX\": \"3\",\n \"CHROMY\": \"3\",\n # Settings\n \"USETHIN\": \"1\",\n \"ARCERRORS\": \"0\",\n \"CALCCORRECTIONS\": \"1\",\n # Outputs\n \"NOMINALMACHINE\": \"\",\n \"ARCAPPLIED\": \"\",\n \"MQXAPPLIED\": \"\",\n \"MBIPAPPLIED\": \"\",\n \"ALLAPPLIED\": \"\",\n \"CORRECTED\": \"\",\n }\n\n # crossing angles and separation bumps\n for idx in [1,2,5,8]:\n for prefix in [\"XING\", \"SEP\", \"PHI\"]:\n default[\"{:s}{:d}\".format(prefix, idx)] = \"0\"\n\n # applied errors\n for idx in range(1, 12):\n for orientation in [\"A\", \"B\"]:\n default[\"{:s}{:d}\".format(orientation, idx)] = \"0\"\n\n # return dictionary filled with defaults and new values\n default.update(new_values)\n return default",
"def default_parameters():\n return BackendNSParameters()",
"def default_values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"default_values\")",
"def get_default_params():\n\n with IOTools.open_file(os.path.join(os.path.dirname(__file__),\n \"defaults.yml\")) as inf:\n result = yaml.load(inf, Loader=RoundTripLoader)\n return result",
"def _get_default_export_values(self, cr, uid, external_session, mapping_id=None, defaults=None, context=None):\n return defaults",
"def get_default_args(func):\n signature = inspect.signature(func)\n return {\n k: v.default\n for k, v in signature.parameters.items()\n if v.default is not inspect.Parameter.empty\n }",
"def _apply_defaults(self):\n # Applies normal parameter defaults\n for scalar_parameter, value in self._DEFAULT_PARAMETER_SCALARS.items():\n if scalar_parameter not in self.parameters:\n self.parameters[scalar_parameter] = copy.copy(value)\n\n # Applies defaults to all ramp parameters\n for table_parameter, table in self._DEFAULT_PARAMETER_TABLES.items():\n self.parameters[table_parameter] = [list(tup) for tup in table]\n self.parameters['_' + table_parameter] = zip(*self.parameters[table_parameter])",
"def get_default_params() -> Dict:\n default_params = {\n \"n_estimators\": {\n \"default_value\": 100,\n \"description\": \"Number of gradient boosted trees. \"\n \"Equivalent to number of boosting rounds.\",\n \"type\": \"int\"\n },\n \"max_depth\": {\n \"default_value\": 6,\n \"description\": \"Maximum tree depth for base learners.\",\n \"type\": \"int\"\n },\n \"learning_rate\": {\n \"default_value\": 0.3,\n \"description\": \"Boosting learning rate (xgb's 'eta')\",\n \"type\": \"float\"\n },\n \"verbosity\": {\n \"default_value\": 1,\n \"description\": \"The degree of verbosity. Valid values are 0 (silent) - 3 (debug).\",\n \"type\": [0, 1, 2, 3]\n },\n \"booster\": {\n \"default_value\": \"gbtree\",\n \"description\": \"Specify which booster to use: gbtree, gblinear or dart.\",\n \"type\": ['gbtree', 'gblinear', 'dart']\n },\n \"tree_method\": {\n \"default_value\": \"auto\",\n \"description\":\n '''\n Specify which tree method to use. Default to auto. If this parameter\n is set to default, XGBoost will choose the most conservative option\n available. It's recommended to study this option from parameters\n document.\n ''',\n \"type\": [\"auto\", \"exact\", \"approx\", \"hist\", \"gpu_hist\"]\n },\n \"n_jobs\": {\n \"default_value\": 1,\n \"description\": '''\n Number of parallel threads used to run xgboost. When used with other Scikit-Learn\n algorithms like grid search, you may choose which algorithm to parallelize and\n balance the threads. Creating thread contention will significantly slow dowm both\n algorithms.\n ''',\n \"type\": \"int\"\n },\n \"gamma\": {\n \"default_value\": 0.0,\n \"description\": \"Minimum loss reduction required to make a further \"\n \"partition on a leaf node of the tree.\",\n \"type\": \"float\"\n },\n \"min_child_weight\": {\n \"default_value\": 1.0,\n \"description\": \"Minimum loss reduction required to make a further \"\n \"partition on a leaf node of the tree.\",\n \"type\": \"float\"\n },\n \"max_delta_step\": {\n \"default_value\": 0.0,\n \"description\": \"Maximum delta step we allow each tree's weight estimation to be.\",\n \"type\": \"float\"\n },\n \"subsample\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of the training instance.\",\n \"type\": \"float\"\n },\n \"colsample_bytree\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of columns when constructing each tree.\",\n \"type\": \"float\"\n },\n \"colsample_bylevel\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of columns for each level.\",\n \"type\": \"float\"\n },\n \"colsample_bynode\": {\n \"default_value\": 1.0,\n \"description\": \"Subsample ratio of columns for each split.\",\n \"type\": \"float\"\n },\n \"reg_alpha\": {\n \"default_value\": 0.0,\n \"description\": \"L1 regularization term on weights\",\n \"type\": \"float\"\n },\n \"reg_lambda\": {\n \"default_value\": 0.0,\n \"description\": \"L2 regularization term on weights\",\n \"type\": \"float\"\n },\n \"scale_pos_weight\": {\n \"default_value\": 1.0,\n \"description\": \"Balancing of positive and negative weights.\",\n \"type\": \"float\"\n },\n \"random_state\": {\n \"default_value\": 0,\n \"description\": \"Random number seed.\",\n \"type\": \"int\"\n },\n \"base_score\": {\n \"default_value\": 0.5,\n \"description\": \"The initial prediction score of all instances, global bias.\",\n \"type\": \"float\"\n },\n # \"missing\": {\n # \"default_value\": None,\n # \"description\": \"Value in the data which needs to be present as a missing value.\",\n # \"type\": 
\"float\"\n # },\n \"num_parallel_tree\": {\n \"default_value\": 1,\n \"description\": \"Used for boosting random forest.\",\n \"type\": \"int\"\n },\n # \"monotone_constraints\": {\n # \"default_value\": \"(0,0)\",\n # \"description\": \" Constraint of variable monotonicity. \"\n # \"See tutorial for more information.\",\n # \"type\": \"str\"\n # },\n # \"interaction_constraints\": {\n # \"default_value\": None,\n # \"description\": '''\n # Constraints for interaction representing permitted interactions. The\n # constraints must be specified in the form of a nest list, e.g. [[0, 1],\n # [2, 3, 4]], where each inner list is a group of indices of features\n # that are allowed to interact with each other. See tutorial for more\n # information\n # ''',\n # \"type\": \"str\"\n # },\n \"importance_type\": {\n \"default_value\": \"gain\",\n \"description\": '''\n The feature importance type for the feature_importances. property:\n either \"gain\", \"weight\", \"cover\", \"total_gain\" or \"total_cover\".\n ''',\n \"type\": [\"gain\", \"weight\", \"cover\", \"total_gain\", \"total_cover\"]\n }\n }\n\n return default_params",
"def default_parameters(name):\n prm = Parameters(name)\n\n prm.add('venous_compliance', float())\n prm.add('arterial_compliance', float())\n\n prm.add('venous_resistance', float())\n prm.add('arterial_resistance', float())\n prm.add('peripheral_resistance', float())\n\n prm.add('venous_resting_volume', float())\n prm.add('arterial_resting_volume', float())\n\n return prm",
"def default_parameters():\n prm = Parameters('windkessel_model')\n\n prm.add('total_volume', float())\n\n prm.add('venous_compliance', float())\n prm.add('arterial_compliance', float())\n\n prm.add('venous_resistance', float())\n prm.add('arterial_resistance', float())\n prm.add('peripheral_resistance', float())\n\n prm.add('venous_resting_volume', float())\n prm.add('arterial_resting_volume', float())\n\n return prm",
"def get_optional_parameters(self) -> list:\n results = []\n if self.no_params or self.params_required:\n return []\n else:\n for parameter, parameter_details in self.parameters.items():\n # Fixing issue #92\n # if parameter == \"effect\":\n # continue\n if parameter_details.default_value:\n results.append(parameter_details.name)\n return results",
"def _default_params(self) -> dict[str, Any]:\n return {\n \"max_tokens\": self.max_tokens,\n \"temperature\": self.temperature,\n \"top_p\": self.top_p,\n \"logprobs\": self.logprobs,\n \"echo\": self.echo,\n \"stop_sequences\": self.stop_sequences,\n \"repeat_penalty\": self.repeat_penalty,\n \"top_k\": self.top_k,\n \"n_threads\": self.n_threads,\n \"n_ctx\": self.n_ctx,\n \"n_gpu_layers\": self.n_gpu_layers,\n \"n_gqa\": self.n_gqa if self.n_gqa else None,\n \"n_parts\": self.n_parts,\n \"seed\": self.seed,\n \"f16_kv\": self.f16_kv,\n \"logits_all\": self.logits_all,\n \"vocab_only\": self.vocab_only,\n \"use_mlock\": self.use_mlock,\n \"n_batch\": self.n_batch,\n \"last_n_tokens_size\": self.last_n_tokens_size,\n \"streaming\": self.streaming,\n }",
"def _initialize_defaults(self):\n for key, value in defaults.items():\n if key not in self.source_params:\n self.source_params[key] = value",
"def _initialize_defaults(self):\n for key, value in defaults.items():\n if key not in self.source_params:\n self.source_params[key] = value",
"def get_cfg_defaults():\n # Return a clone so that the defaults will not be altered\n # This is for the \"local variable\" use pattern\n return C.clone()",
"def getDefaultProperties(self):\n return _libsbml.SBMLLocalParameterConverter_getDefaultProperties(self)",
"def get_cfg_defaults():\r\n # Return a clone so that the defaults will not be altered\r\n # This is for the \"local variable\" use pattern\r\n return _C.clone()",
"def default_params():\n params = {}\n params['dataset'] = 'adult'\n params['engines'] = ['MD','RDA']\n params['iters'] = 10000\n params['epsilon'] = 1.0\n params['delta'] = 0.0\n params['bounded'] = True\n params['frequency'] = 1\n params['seed'] = 0\n params['save'] = None\n params['load'] = None\n params['plot'] = None\n\n return params"
] | [
"0.7586267",
"0.75206536",
"0.7345313",
"0.7338341",
"0.7124671",
"0.708384",
"0.7017844",
"0.68795174",
"0.68700486",
"0.6861506",
"0.68081266",
"0.6789644",
"0.672888",
"0.670684",
"0.670621",
"0.6609515",
"0.6601124",
"0.6569071",
"0.6553868",
"0.6518829",
"0.6501356",
"0.6484483",
"0.6474542",
"0.6459664",
"0.6444757",
"0.6444757",
"0.640728",
"0.6397446",
"0.63938874",
"0.63924783"
] | 0.80710435 | 0 |
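Unlike names and descriptions, the default values come back as plain C doubles, so no `.decode()` step appears in the positive document. A sketch of a common downstream use, seeding user overrides from the defaults (the accessor names are taken from the rows above; the override validation itself is illustrative):

```python
def ext_params_with_overrides(func, **overrides):
    params = dict(zip(func.get_ext_param_names(),
                      func.get_ext_param_default_values()))
    unknown = set(overrides) - set(params)
    if unknown:
        raise KeyError(f"unknown external parameters: {sorted(unknown)}")
    params.update(overrides)  # defaults survive for anything not overridden
    return params
```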
Sets the density threshold below which the functional will not be evaluated. | def set_dens_threshold(self, dens_threshold):
if dens_threshold < 0:
raise ValueError("The density threshold cannot be smaller than 0.")
core.xc_func_set_dens_threshold(self.xc_func, ctypes.c_double(dens_threshold)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setThreshold(self, threshold): # real signature unknown; restored from __doc__\n pass",
"def clear_density(self):\n self._density = None",
"def set_threshold(self, threshold):\n self._threshold = check_value_positive('threshold', threshold)",
"def threshold(self,thresholdValue):\n # TO DO\n pass",
"def clamp(self):\n self.threshold.data.clamp_(self.min_threshold)",
"def setThreshold(self, v):\n self._set(threshold=v)\n return self",
"def setThreshold(self, v):\n self._set(threshold=v)\n return self",
"def setThreshold(self, v):\n self._set(threshold=v)\n return self",
"def setThreshold(self, v):\n self._set(threshold=v)\n return self",
"def setThreshold(self, v):\n self._set(threshold=v)\n return self",
"def set_sigma_threshold(self, sigma_threshold):\n\n if sigma_threshold < 0:\n raise ValueError(\"The sigma threshold cannot be smaller than 0.\")\n\n core.xc_func_set_sigma_threshold(self.xc_func, ctypes.c_double(sigma_threshold))",
"def setThreshold(self, value):\n return self._set(threshold=value)",
"def SetThreshold (self,VolumeNode, min, max):\n DisplayNode = VolumeNode.GetScalarVolumeDisplayNode()\n DisplayNode.SetApplyThreshold(True)\n DisplayNode.SetThreshold(min,max)",
"def density(self, density):\n\n self._density = density",
"def __init__(self, threshold: float = 0.3, initial_val: float = 0.0) -> None:\n self.threshold = threshold\n self.initial_val = initial_val",
"def setPowerIfNecessary(self):\n if self.p.power == 0 and self.p.powerDensity > 0:\n self.setPowerFromDensity()",
"def unsetThresholdLevel(self):\n return _libsbml.Input_unsetThresholdLevel(self)",
"def set_threshold(self, cat, t):\n self.con.execute(\"update ct set threshold=%f where category='%s'\" \n % (t, cat))",
"def matrix_filtering_threshold(self, matrix_filtering_threshold):\n\n self._matrix_filtering_threshold = matrix_filtering_threshold",
"def set_ref_density(self, ref_density):\n self.ref_density = ref_density",
"def infer_threshold(self, x: np.ndarray, fpr: float) -> None:\n self.backend.infer_threshold(self.backend._to_backend_dtype(x), fpr)",
"def make_conditional_density(bgm_fit, threshold, sigma, width):\n pass",
"def prune_values(self, threshold):\n changed = False\n new_table = dict()\n for assignment in self._table.keys():\n prob = self._table[assignment]\n if prob >= threshold:\n new_table[assignment] = prob\n else:\n changed = True\n\n self._table = new_table\n return changed",
"def set_fade_threshold(self, address):\n self.model.fade_address = address",
"def set_min_uncertainty(signal, threshold=0.05):\n # Increase Hirex-Sr uncertainties to be a rel error of 5% minimum (JUST FOR TESTING)\n corrected_unc=signal.std_y/signal.y<=0.05\n signal.std_y[corrected_unc]=0.05*signal.y[corrected_unc]\n\n # correction for normalized uncertainties\n if signal.s/signal.m<=0.05:\n signal.s=0.05*signal.m\n\n signal.std_y_norm=scipy.sqrt((signal.std_y / signal.m)**2.0 + ((signal.y / signal.m)*(signal.s / signal.m))**2.0)",
"def updateThreshold(self, t):\n\n budget = self.budget\n self.threshold = self.init_threshold * self.diameter * ((budget-t) / self.budget)**self.decay_factor",
"def threshold_col_del(self, threshold):\n self.data = self.data.dropna(thresh=threshold*len(self.data), axis=1) \n self.X = self.data.drop(self.target, axis =1)\n self.y = self.data[self.target]",
"def threshold(self) -> float:\n return pulumi.get(self, \"threshold\")",
"def apply_threshold(da, threshold=1.):\n with np.errstate(all='ignore'):\n result = xr.where(da < threshold, np.nan, da)\n result.attrs = da.attrs\n return result",
"def _check_density(density, n_features):\n if density == \"auto\":\n density = 1 / np.sqrt(n_features)\n\n elif density <= 0 or density > 1:\n raise ValueError(\"Expected density in range ]0, 1], got: %r\" % density)\n return density"
] | [
"0.63226175",
"0.61955786",
"0.6171416",
"0.61262935",
"0.59644085",
"0.58235157",
"0.58235157",
"0.58235157",
"0.58235157",
"0.58235157",
"0.57770663",
"0.5771117",
"0.5760468",
"0.5694355",
"0.5690046",
"0.56802684",
"0.5664838",
"0.56599784",
"0.56505716",
"0.5615393",
"0.56036675",
"0.5583478",
"0.55800015",
"0.55748904",
"0.55462146",
"0.55284065",
"0.5477165",
"0.5432412",
"0.542168",
"0.5413156"
] | 0.7752366 | 0 |
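The positive document here validates on the Python side before crossing the ctypes boundary, so an invalid threshold never reaches C. The same guard pattern in isolation (the `setter` callable stands in for `core.xc_func_set_dens_threshold`; a real binding would pass the actual function handle):

```python
import ctypes

def checked_set_threshold(setter, handle, value):
    # Reject bad input up front; the C side would not raise on its own.
    if value < 0:
        raise ValueError("The density threshold cannot be smaller than 0.")
    setter(handle, ctypes.c_double(value))

# Usage with a dummy setter in place of the ctypes call:
checked_set_threshold(lambda h, v: print("set to", v.value), None, 1e-12)
```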
Calculate the number of columns and rows required to divide an image into ``n`` parts. Return a tuple of integers in the format (num_columns, num_rows) | def calc_columns_rows(n):
num_columns = int(ceil(sqrt(n)))
num_rows = int(ceil(n / float(num_columns)))
return (num_columns, num_rows) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compute_rows_columns(num_wells):\n a = math.sqrt(num_wells / 6)\n n_rows = int(round(2 * a))\n n_columns = int(round(3 * a))\n return n_rows, n_columns",
"def compute_nrows_ncolumns(nplots):\n n_rows = int(np.sqrt(nplots)) + (np.sqrt(nplots) != int(np.sqrt(nplots))) * 1\n n_columns = int(nplots / n_rows) + (nplots / n_rows != int(nplots / n_rows)) * 1\n return n_rows, n_columns",
"def calculate_grid_dimensions(num_items, num_columns=None):\n if num_columns is None:\n num_rows_columns = int(math.ceil(math.sqrt(num_items)))\n return num_rows_columns, num_rows_columns\n else:\n num_rows = int(math.ceil(num_items / num_columns))\n return num_rows, num_columns",
"def _num_extracted_rows_and_columns(\n image_size: int,\n patch_size: int,\n stride: int,\n num_scales: int,\n scale_factor: int,\n) -> int:\n largest_patch_size = int(patch_size * (scale_factor**(num_scales - 1)))\n residual = image_size - largest_patch_size\n return (residual // stride) + 1",
"def get_num_tiles(rows, cols, row_tile_size, col_tile_size):\n num_row_tiles = math.ceil(rows / row_tile_size)\n num_col_tiles = math.ceil(cols / col_tile_size)\n return num_row_tiles, num_col_tiles",
"def getLayoutDimensions(n, pref=\"height\"):\n nopt = np.sqrt(n)\n inoptw = int(nopt)\n inopth = int(nopt)\n while inoptw * inopth < n:\n if pref == \"width\":\n inoptw += 1\n if inoptw * inopth > (n - inopth):\n inoptw -= 1\n inopth += 1\n else:\n inopth += 1\n if inoptw * inopth > (n - inoptw):\n inopth -= 1\n inoptw += 1\n\n return (inopth, inoptw)",
"def _get_number_of_rows_to_process(self, bitsPerPixel):\n # TODO: do a better job estimating the number of rows to process.\n # Compute the number of pixels that fit under the memory limit.\n memLimit = (psutil.virtual_memory().available/\n (bitsPerPixel*(1024**2)))\n memLimit = int(50*np.floor(memLimit/10.0))\n numStackPix = memLimit*(1024**2)*8/bitsPerPixel\n\n # Grab the number of images and the shape of those image\n numImg, ny, nx = self.shape\n\n # Compute the number of rows to be processed in each chunk\n numRows = int(np.floor(numStackPix/(numImg*nx)))\n\n # Catch the case where ALL rows get handled at once\n if numRows > ny: numRows = ny\n numSections = int(np.ceil(ny/numRows))\n\n # Recompute the number of rows to be evenly spaced\n numRows = int(np.ceil(ny/numSections))\n\n return numRows, numSections",
"def get_grid_size(self, img):\r\n grid_height = int(np.ceil(img.shape[0] / self.config.grid_row))\r\n grid_width = int(np.ceil(img.shape[1] / self.config.grid_col))\r\n return grid_height, grid_width",
"def _get_split_sizes(self, n_examples):\n\n min_ex = (int(n_examples // self.n_splits)\n * np.ones(self.n_splits, dtype=np.int8))\n \n rem = np.array(\n [1 if i < n_examples % self.n_splits else 0\n for i in range(self.n_splits)],\n dtype=np.int8)\n\n return np.add(min_ex, rem)",
"def number_of_patches(width, height, patch_size):\n n_patches_x = width // patch_size\n n_patches_y = height // patch_size\n return n_patches_x, n_patches_y",
"def infer_ncols_nrows(n_subplots, ncols, nrows, max_ncols, **kwargs):\n _ = kwargs\n\n # Make ncols/nrows\n if ncols is None and nrows is None:\n ncols = min(max_ncols, n_subplots)\n nrows = ceil_div(n_subplots, ncols)\n elif ncols is None:\n ncols = ceil_div(n_subplots, nrows)\n elif nrows is None:\n nrows = ceil_div(n_subplots, ncols)\n\n return ncols, nrows",
"def get_number_of_rows_and_columns(m):\n\n r = int(np.sqrt(m))\n c = m // r if np.mod(m, r) == 0 else m // r + 1\n return r, c",
"def getNumTiles(self):\n return len(list(product(list(range(self.width+1))[1:], list(range(self.height+1))[1:])))",
"def _get_dimensions(self):\n corners = []\n for module in self.modules:\n for tile in module:\n corners.append(tile.corners())\n corners = np.concatenate(corners)[:, :2] / self._pixel_shape\n\n # Find extremes, add 1 px margin to allow for rounding errors\n min_xy = corners.min(axis=0).astype(int) - 1\n max_xy = corners.max(axis=0).astype(int) + 1\n\n size = max_xy - min_xy\n centre = -min_xy\n # Switch xy -> yx\n return tuple(size[::-1]), centre[::-1]",
"def row_count(self):\n return self.well_count // self.col_count",
"def _get_dimensions(self):\n corners = []\n for module in self.modules:\n for tile in module:\n corners.append(tile.corner_idx)\n corners.append(tile.opp_corner_idx)\n corners = np.stack(corners)\n\n # Find extremes\n min_yx = corners.min(axis=0)\n max_yx = corners.max(axis=0)\n\n size = max_yx - min_yx\n centre = -min_yx\n return tuple(size), centre",
"def get_grid_shape(num_examples):\n height = int(numpy.floor(numpy.sqrt(num_examples)))\n width = int(numpy.ceil(num_examples * 1. / height))\n\n return (height, width)",
"def get_num_chunks(self) -> int:",
"def get_size(img):\n ih, iw = img.shape[:2]\n return iw * ih",
"def _compute_rows_and_cols_corrected(n_rows: int, n_cols: int, confmat_sum: Tensor) ->Tuple[Tensor, Tensor]:\n rows_corrected = n_rows - (n_rows - 1) ** 2 / (confmat_sum - 1)\n cols_corrected = n_cols - (n_cols - 1) ** 2 / (confmat_sum - 1)\n return rows_corrected, cols_corrected",
"def count_tilings(n: int) -> int:\n if n < 5:\n # handle recursive base case\n return 2**(n - 1)\n else:\n # place each tile at end of row and recurse on remainder\n return (count_tilings(n - 1) +\n count_tilings(n - 2) +\n count_tilings(n - 3) +\n count_tilings(n - 4))",
"def count_divisions(num, n):\n count = 0\n while pe_005.is_divisible(num, n):\n num = num // n\n count += 1\n return count, num",
"def output_image_size(n_patches_x, n_patches_y, patch_size):\n width = n_patches_x * patch_size\n height = n_patches_y * patch_size\n return width, height",
"def getDimension(data):\r\n # open image for reading in binary mode\r\n\r\n # read the 2 bytes\r\n a = data[163:165]\r\n\r\n # calculate height\r\n height = (a[0] << 8) + a[1]\r\n\r\n # next 2 bytes is width\r\n a = data[165:167]\r\n\r\n # calculate width\r\n width = (a[0] << 8) + a[1]\r\n\r\n return (width, height)",
"def findWidthHeight():\n\n for f in os.listdir(\"%s/train/images/\" % args.dataset):\n if f.endswith(\".jpeg\"):\n imf = \"%s/train/images/%s\" % (args.dataset, f)\n try:\n im = Image.open(imf)\n except:\n print \"Could not open training image %s to read its size.\" %imf\n usage()\n break\n \n width = int(im.size[0])\n height = int(im.size[1])\n \n nwidth = width\n nheight = height\n if args.width:\n nwidth = args.width\n if args.height:\n nheight = args.height\n\n return width, height, nwidth, nheight, not(width == nwidth and height == nheight)",
"def get_dims(self):\n row_lbl, col_lbl = self.get_idxvals()\n return len(row_lbl), len(col_lbl)",
"def voxel_count(self):\n return self.cols * self.rows * self.sections",
"def getNumTiles(self):\n return self.w * self.h",
"def _get_image_dimensions(self):\n\t\timageWidth = int(self.labels['IMAGE']['LINE_SAMPLES'])\n\t\timageHeight = int(self.labels['IMAGE']['LINES'])\n\t\treturn imageWidth, imageHeight",
"def get_img_dims(img):\n height, width = img.shape[:2]\n return width, height"
] | [
"0.73097175",
"0.7159921",
"0.6962047",
"0.69273585",
"0.6925103",
"0.6749133",
"0.66736543",
"0.6470178",
"0.64632785",
"0.63872576",
"0.6368298",
"0.62814194",
"0.6258692",
"0.6197939",
"0.61734897",
"0.6155329",
"0.6155302",
"0.61514986",
"0.61438143",
"0.6133328",
"0.61312526",
"0.6130664",
"0.6118315",
"0.61169475",
"0.61026084",
"0.6093603",
"0.60616916",
"0.60548323",
"0.60413",
"0.60203373"
] | 0.79326427 | 0 |
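This row's positive document is pure arithmetic and self-contained, so its layout invariants can be checked directly: the grid always has enough cells for `n` tiles, and the last row is never completely empty. A quick property check:

```python
from math import ceil, sqrt

def calc_columns_rows(n):
    num_columns = int(ceil(sqrt(n)))
    num_rows = int(ceil(n / float(num_columns)))
    return (num_columns, num_rows)

for n in range(1, 100):
    cols, rows = calc_columns_rows(n)
    assert cols * rows >= n          # room for every tile
    assert cols * (rows - 1) < n     # dropping a row would lose tiles
```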
Calculate combined size of tiles. | def get_combined_size(tiles):
# TODO: Refactor calculating layout to avoid repetition.
columns, rows = calc_columns_rows(len(tiles))
tile_size = tiles[0].image.size
return (tile_size[0] * columns, tile_size[1] * rows) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tile_size_2d(self):\n return 32.0, 32.0",
"def get_num_tiles(rows, cols, row_tile_size, col_tile_size):\n num_row_tiles = math.ceil(rows / row_tile_size)\n num_col_tiles = math.ceil(cols / col_tile_size)\n return num_row_tiles, num_col_tiles",
"def get_tilesize(self, sampling):\n xsize = {\n 'T6': 600000,\n 'T3': 300000,\n 'T1': 100000\n }[self.get_tiletype(sampling)]\n ysize = {\n 'T6': 600000,\n 'T3': 300000,\n 'T1': 100000\n }[self.get_tiletype(sampling)]\n return xsize, ysize",
"def getNumTiles(self):\n return len(list(product(list(range(self.width+1))[1:], list(range(self.height+1))[1:])))",
"def num_tiles(self):\n return self.num_row_tiles * self.num_col_tiles",
"def getNumTiles(self):\n return self.w * self.h",
"def getNumTiles(self):\n return (self.width) * (self.height)",
"def getNumTiles(self):\n #raise NotImplementedError #refer https://docs.python.org/2/library/exceptions.html\n return self.width * self.height",
"def getNumTiles(self):\n return self.height * self.width",
"def calc_size(self):\r\n pass",
"def calculate_size(self, num_dots):\n self.objects = num_dots\n square = sqrt(self.objects)\n if self.objects % square == 0:\n return int(square), int(square)\n else:\n denom = self.objects // sqrt(self.objects)\n while self.objects % denom != 0:\n denom -= 1\n return int(denom), int(self.objects // denom)",
"def output_image_size(n_patches_x, n_patches_y, patch_size):\n width = n_patches_x * patch_size\n height = n_patches_y * patch_size\n return width, height",
"def get_size(self):\n tmpsize = 0\n for variable in self.variables:\n tmpsize += variable.get_size()\n for subchunk in self.subchunks:\n tmpsize += subchunk.get_size()\n self.size.value = tmpsize\n return self.size.value + self.ID.get_size() + self.size.get_size()",
"def get_size(self):\n tmpsize = 0\n for variable in self.variables:\n tmpsize += variable.get_size()\n for subchunk in self.subchunks:\n tmpsize += subchunk.get_size()\n return tmpsize",
"def calculate_min_max_tiles(self):",
"def calc_size(cls) -> int:\n return calcsize('<' + cls.fmt)",
"def _calculate_room_size(self):\n config = self.game.config\n\n short_side = min(config.map_height, config.map_width)\n\n largest_room_size = 0\n total_size = 0\n total_corridor_len = self.corridor_length * (self.grid_size - 1)\n for check_size in range(3, short_side, 2):\n all_rooms_len = check_size * self.grid_size\n rooms_and_corridors = all_rooms_len + total_corridor_len\n if rooms_and_corridors <= short_side:\n largest_room_size = check_size\n total_size = rooms_and_corridors\n else:\n break\n\n return largest_room_size, total_size",
"def _get_dimensions(self):\n corners = []\n for module in self.modules:\n for tile in module:\n corners.append(tile.corner_idx)\n corners.append(tile.opp_corner_idx)\n corners = np.stack(corners)\n\n # Find extremes\n min_yx = corners.min(axis=0)\n max_yx = corners.max(axis=0)\n\n size = max_yx - min_yx\n centre = -min_yx\n return tuple(size), centre",
"def calculate_size(self):\n top_left_y = 0\n top_left_x = 0\n\n bottom_right_y = 1\n bottom_right_x = 1\n\n # TODO: calculate the correct bounds of the threat zone.\n\n raise NotImplementedError\n\n # if there is a sight_range for this map_obstacle then increase the size of the zone.\n if self.sight_range > 0:\n top_left_y += self.sight_range\n top_left_x += self.sight_range\n bottom_right_y += self.sight_range\n bottom_right_x += self.sight_range\n\n top_left = (top_left_y, top_left_x)\n bottom_right = (bottom_right_y, bottom_right_x)\n\n height = bottom_right_y - top_left_y\n width = bottom_right_x - top_left_x\n\n self.top_left_y = top_left_y\n self.top_left_x = top_left_x\n self.bottom_right_y = bottom_right_y\n self.bottom_right_x = bottom_right_x\n self.height = height\n self.width = width\n\n return (top_left, bottom_right, height, width)",
"def _get_dimensions(self):\n corners = []\n for module in self.modules:\n for tile in module:\n corners.append(tile.corners())\n corners = np.concatenate(corners)[:, :2] / self._pixel_shape\n\n # Find extremes, add 1 px margin to allow for rounding errors\n min_xy = corners.min(axis=0).astype(int) - 1\n max_xy = corners.max(axis=0).astype(int) + 1\n\n size = max_xy - min_xy\n centre = -min_xy\n # Switch xy -> yx\n return tuple(size[::-1]), centre[::-1]",
"def get_tile_size(self, map_size = None, show_info = None):\n if not map_size: map_size = self.map_size\n w,h = self.img_size\n x_tiles,y_tiles = map_size\n\n tile_raw_w = w / x_tiles\n tile_raw_h = h / y_tiles\n\n if self.debug:\n print(f' ► Raw tile width: {tile_raw_w}\\n ► Raw tile height: {tile_raw_h}')\n\n tile_w = int(round(tile_raw_w))\n tile_h = int(round(tile_raw_h))\n\n if show_info:\n print(f' Image Size: {w} x {h} px\\n Tile Size: {tile_w} x {tile_h} px\\n Map Size: {x_tiles} x {y_tiles} tiles')\n\n error_w = tile_w - tile_raw_w\n error_h = tile_h - tile_raw_h\n print(f'\\n -=ERROR INFO=-\\n Tile Size Width Error: {round(error_w,4)} px \\n Tile Size Height Error: {round(error_h,4)} px \\n Total Width Rounding Error: {round(error_w * x_tiles,4)} px \\n Total Height Rounding Error: {round(error_h * y_tiles,4)} px\\n')\n\n return (tile_raw_w,tile_raw_h)",
"def compute_combined_size(size_dict, modes):\n size = 1\n for mode in modes:\n size *= size_dict[mode]\n return size",
"def size(self):\n return reduce(lambda x, ins: x + ins.size, self.instructions, 0)",
"def get_tile_size(num_pixels, tile_size=400):\n\n # How many times can we repeat a tile of the desired size.\n num_tiles = int(round(num_pixels / tile_size))\n\n # Ensure that there is at least 1 tile.\n num_tiles = max(1, num_tiles)\n\n # The actual tile-size.\n actual_tile_size = math.ceil(num_pixels / num_tiles)\n\n return actual_tile_size",
"def compute_outub_size(height, width, dtype, core_nums):\n ubuf_size = 100 * 1024 # ub whole size 100 * 1024 byte\n out_ele_perblock = compute_perblock_nums(dtype)\n out_blocks = math.ceil(height * width / out_ele_perblock)\n block_per_core = math.ceil(out_blocks / core_nums)\n use_cores = math.ceil(out_blocks / block_per_core)\n out_ele_size = cce.cce_intrin.get_bit_len(dtype) // BITS_NUMS\n out_f16_size = cce.cce_intrin.get_bit_len(\"float16\") // BITS_NUMS\n out_int8_size = cce.cce_intrin.get_bit_len(\"int8\") // BITS_NUMS\n if dtype in [\"int8\", \"uint8\"]:\n need_size = block_per_core * out_ele_perblock * (out_f16_size + out_int8_size)\n if need_size > ubuf_size:\n block_num = ubuf_size // (out_ele_perblock * (out_f16_size + out_int8_size))\n out_factor = math.ceil(block_per_core / block_num)\n last_remian = block_per_core % block_num\n else:\n block_num = block_per_core\n out_factor = 1\n last_remian = 0\n total_len = block_num * out_ele_perblock\n else:\n need_size = block_per_core * out_ele_size * out_ele_perblock\n if need_size > ubuf_size:\n block_num = ubuf_size // BYTES_PER_BLOCK\n out_factor = math.ceil(block_per_core / block_num)\n last_remian = block_per_core % block_num\n else:\n block_num = block_per_core\n out_factor = 1\n last_remian = 0\n total_len = block_num * out_ele_perblock\n\n return block_num, block_per_core, out_factor, last_remian, total_len, use_cores",
"def get_size(self) -> int:\n total_size = 0\n for entry in self.__entries:\n total_size += entry.get_size()\n return total_size",
"def compute_size(self):\n length = np.max(np.max(self.positions, axis=1) -\n np.min(self.positions, axis=1))\n return length + 2*self.get_radii().max()",
"def compute_tile_size(total_size,\n tile_size_min=180,\n tile_size_max=512,\n tile_size_step=2,\n chunk_size=None,\n num_levels_min=None,\n int_div=False):\n\n ts = total_size\n num_levels = 0\n while ts % 2 == 0:\n ts2 = ts // 2\n if ts2 < tile_size_min:\n break\n ts = ts2\n num_levels += 1\n\n if ts <= tile_size_max and (not num_levels_min or num_levels >= num_levels_min):\n return ts\n\n min_penalty = 10 * total_size\n best_tile_size = None\n for ts in range(tile_size_min, tile_size_max + 1, tile_size_step):\n\n if int_div and total_size % ts:\n continue\n\n num_tiles = cardinal_div_round(total_size, ts)\n if num_levels_min:\n num_levels = cardinal_log2(num_tiles * ts)\n if num_levels < num_levels_min:\n continue\n\n total_size_excess = ts * num_tiles - total_size\n penalty = total_size_excess\n\n if chunk_size:\n num_chunks = cardinal_div_round(ts, chunk_size)\n tile_size_excess = ts * num_chunks - ts\n penalty += tile_size_excess\n\n if penalty < min_penalty:\n min_penalty = penalty\n best_tile_size = ts\n\n if not best_tile_size:\n # if no suitable tile size can be found, use the image untiled\n best_tile_size = total_size\n return best_tile_size",
"def __len__(self) -> int:\n return len(self._tiles)",
"def getSize(self):\n return (int(self.getWidth()), int(self.getHeight()))"
] | [
"0.6917832",
"0.6854386",
"0.66924536",
"0.66773844",
"0.6626963",
"0.6563682",
"0.6505493",
"0.6481431",
"0.64496267",
"0.641139",
"0.63941205",
"0.631634",
"0.6312036",
"0.627973",
"0.62632513",
"0.625943",
"0.6246556",
"0.621888",
"0.6217926",
"0.62107056",
"0.61723995",
"0.6150797",
"0.6135983",
"0.61272764",
"0.6107911",
"0.6065271",
"0.60528404",
"0.6042425",
"0.6030999",
"0.6018477"
] | 0.8319171 | 0 |
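Because `get_combined_size` only touches `len(tiles)` and `tiles[0].image.size`, it can be exercised with duck-typed stand-ins instead of real PIL images. A minimal sketch reusing `calc_columns_rows` from the previous row:

```python
from collections import namedtuple
from math import ceil, sqrt

def calc_columns_rows(n):
    num_columns = int(ceil(sqrt(n)))
    return (num_columns, int(ceil(n / float(num_columns))))

def get_combined_size(tiles):
    columns, rows = calc_columns_rows(len(tiles))
    tile_size = tiles[0].image.size
    return (tile_size[0] * columns, tile_size[1] * rows)

# Anything with an `.image.size` attribute works; no PIL required.
Image = namedtuple("Image", "size")
Tile = namedtuple("Tile", "image")

print(get_combined_size([Tile(Image((256, 128)))] * 6))  # -> (768, 256)
```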
``tiles`` Tuple of ``Image`` instances. ``width`` Optional, width of combined image. ``height`` Optional, height of combined image. ``Image`` instance. | def join(tiles, width=0, height=0):
# Don't calculate size if width and height are provided
# this allows an application that knows what the
# combined size should be to construct an image when
# pieces are missing.
if width > 0 and height > 0:
im = Image.new("RGBA", (width, height), None)
else:
im = Image.new("RGBA", get_combined_size(tiles), None)
columns, rows = calc_columns_rows(len(tiles))
for tile in tiles:
try:
im.paste(tile.image, tile.coords)
except IOError:
# do nothing, blank out the image
continue
return im | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _tile_images(imgs, tile_shape, concatenated_image, margin_color=None):\n x_num, y_num = tile_shape\n one_width = imgs[0].shape[1]\n one_height = imgs[0].shape[0]\n if concatenated_image is None:\n concatenated_image = np.zeros((one_height * y_num, one_width * x_num, 3),\n dtype=np.uint8)\n if margin_color is not None:\n concatenated_image[:, :] = margin_color\n for y in range(y_num):\n for x in range(x_num):\n i = x + y * x_num\n if i >= len(imgs):\n pass\n else:\n concatenated_image[y*one_height:(y+1)*one_height,x*one_width:(x+1)*one_width,] = imgs[i]\n return concatenated_image",
"def combine_images(images: list) -> Image:\n img_width = images[0][0].width\n img_height = images[0][0].height\n new_size = (img_width * len(images[0]), img_height * len(images))\n new_image = Image.new('RGB', new_size)\n\n # Add all the images from the grid to the new, blank image\n for rowindex, row in enumerate(images):\n for colindex, image in enumerate(row):\n location = (colindex * img_width, rowindex * img_height)\n new_image.paste(image, location)\n\n return new_image",
"def tile_images(image_stack):\n assert len(image_stack.shape) == 4\n image_list = [image_stack[i, :, :, :] for i in range(image_stack.shape[0])]\n tiled_images = np.concatenate(image_list, axis=1)\n return tiled_images",
"def tiles(self, width: int, height: int) -> TileSet:\n y_count = len(self.tiling)\n for y_index, y_tile in enumerate(self.tiling):\n\n x_count = len(y_tile)\n for x_index, tile_strength in enumerate(y_tile):\n\n # Doing multiplication before devision here to make sure rounding is correct\n bounding_box = (\n # from (x1, y1)\n int(width * x_index / x_count),\n int(height * y_index / y_count),\n # to (x2, y2)\n int(width * (x_index + 1) / x_count),\n int(height * (y_index + 1) / y_count),\n )\n\n yield bounding_box, tile_strength",
"def combine_pictures(images):\n widths, heights = zip(*(i.size for i in images))\n\n total_width = sum(widths)\n max_height = max(heights)\n\n new_im = Image.new('RGB', (total_width, max_height))\n\n x_offset = 0\n for im in images:\n new_im.paste(im, (x_offset, 0))\n x_offset += im.size[0]\n\n new_im.save('test.jpg')\n\n return True",
"def tile_image(\n im: Image.Image, width: int, height: int, mode: Optional[str] = \"RGB\", **kwargs: Any\n) -> Image.Image:\n im_out = Image.new(mode, (width, height), **kwargs)\n\n h_tiles = ceil(width / im.width)\n v_tiles = ceil(height / im.height)\n\n for i in range(v_tiles):\n y = im.height * i\n for j in range(h_tiles):\n x = im.width * j\n im_out.paste(im, box=(x, y))\n\n return im_out",
"def get_tile_image(imgs, tile_shape=None, result_img=None, margin_color=None):\n def get_tile_shape(img_num):\n x_num = 0\n y_num = int(math.sqrt(img_num))\n while x_num * y_num < img_num:\n x_num += 1\n return x_num, y_num\n\n if tile_shape is None:\n tile_shape = get_tile_shape(len(imgs))\n\n # get max tile size to which each image should be resized\n max_height, max_width = np.inf, np.inf\n for img in imgs:\n max_height = min([max_height, img.shape[0]])\n max_width = min([max_width, img.shape[1]])\n\n # resize and concatenate images\n for i, img in enumerate(imgs):\n h, w = img.shape[:2]\n h_scale, w_scale = max_height / h, max_width / w\n scale = min([h_scale, w_scale])\n h, w = int(scale * h), int(scale * w)\n img = cv2.resize(img, (w, h))\n img = centerize(img, (max_height, max_width, 3),\n margin_color=margin_color)\n imgs[i] = img\n return _tile_images(imgs, tile_shape, result_img,\n margin_color=margin_color)",
"def get_combined_size(tiles):\n # TODO: Refactor calculating layout to avoid repetition.\n columns, rows = calc_columns_rows(len(tiles))\n tile_size = tiles[0].image.size\n return (tile_size[0] * columns, tile_size[1] * rows)",
"def image_tiles(bqsession, image_service_url, tile_size=64):\n dims = bqsession.fetchxml(image_service_url, dims='')\n x = int(dims.xpath('//tag[@name=\"image_num_x\"]')[0].attrib[ 'value'])\n y = int(dims.xpath('//tag[@name=\"image_num_y\"]')[0].attrib[ 'value'])\n \n for ix in range(int(x/tile_size)-1):\n for iy in range(int(y/tile_size)-1):\n yield bqsession.c.prepare_url(image_service_url, tile='0,%s,%s,%s' % (str(ix), str(iy), str(tile_size)))",
"def pack_image_nest(cls, imgs):\n assert rpack is not None, \"You need to install rectangle-packer first!\"\n\n imgs = nest.flatten(imgs)\n if len(imgs) == 0:\n return\n\n # first get all images' sizes (w,h)\n sizes = [(i.shape[1], i.shape[0]) for i in imgs]\n # call rpack for an approximate solution: [(x,y),...] positions\n positions = rpack.pack(sizes)\n # compute the height and width of the enclosing rectangle\n H, W = 0, 0\n for size, pos in zip(sizes, positions):\n H = max(H, pos[1] + size[1])\n W = max(W, pos[0] + size[0])\n\n packed_img = np.full((H, W, 3), 255, dtype=np.uint8)\n for pos, img in zip(positions, imgs):\n packed_img[pos[1]:pos[1] + img.shape[0], pos[0]:pos[0] +\n img.shape[1], :] = img.data\n return cls(packed_img)",
"def tile_images(img, img_size=32, rows=4, cols=4, spacing=1):\n images = np.ones([3, rows * (img_size + spacing) - spacing, cols * (img_size + spacing)], dtype=np.float32)\n coords = [(i, j) for i in range(rows) for j in range(cols)]\n\n for (i, j), image in zip(coords, img):\n x = i * (img_size + spacing)\n y = j * (img_size + spacing)\n images[:, x: x+img_size, y:y+img_size] = image\n\n return images",
"def _split_image_into_tiles(\n self, image: np.ndarray\n ) -> t.Sequence[t.Tuple[t.Tuple[t.Any, ...], np.ndarray]]:\n h, w, c = image.shape\n tile_height = (\n math.ceil(h / (self._n_tiles // 2 - 1))\n if self._n_tiles > 4\n else math.ceil(h / (self._n_tiles // 2))\n )\n tile_width = math.ceil(w / (self._n_tiles // 2))\n tiles = [] # type: ignore\n for i in range(0, h, tile_height):\n for j in range(0, w, tile_width):\n tiles.append(\n (\n (i, i + tile_height, j, j + tile_width),\n image[i : i + tile_height, j : j + tile_width, :],\n )\n )\n return tiles",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\r\n scale_rows_to_unit_interval=True,\r\n output_pixel_vals=True):\r\n\r\n assert len(img_shape) == 2\r\n assert len(tile_shape) == 2\r\n assert len(tile_spacing) == 2\r\n\r\n # The expression below can be re-written in a more C style as\r\n # follows :\r\n #\r\n # out_shape = [0,0]\r\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\r\n # tile_spacing[0]\r\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\r\n # tile_spacing[1]\r\n out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp\r\n in zip(img_shape, tile_shape, tile_spacing)]\r\n\r\n if isinstance(X, tuple):\r\n assert len(X) == 4\r\n # Create an output numpy ndarray to store the image\r\n if output_pixel_vals:\r\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\r\n dtype='uint8')\r\n else:\r\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\r\n dtype=X.dtype)\r\n\r\n #colors default to 0, alpha defaults to 1 (opaque)\r\n if output_pixel_vals:\r\n channel_defaults = [0, 0, 0, 255]\r\n else:\r\n channel_defaults = [0., 0., 0., 1.]\r\n\r\n for i in xrange(4):\r\n if X[i] is None:\r\n # if channel is None, fill it with zeros of the correct\r\n # dtype\r\n dt = out_array.dtype\r\n if output_pixel_vals:\r\n dt = 'uint8'\r\n out_array[:, :, i] = numpy.zeros(out_shape,\r\n dtype=dt) + channel_defaults[i]\r\n else:\r\n # use a recurrent call to compute the channel and store it\r\n # in the output\r\n out_array[:, :, i] = tile_raster_images(\r\n X[i], img_shape, tile_shape, tile_spacing,\r\n scale_rows_to_unit_interval, output_pixel_vals)\r\n return out_array\r\n\r\n else:\r\n # if we are dealing with only one channel\r\n H, W = img_shape\r\n Hs, Ws = tile_spacing\r\n\r\n # generate a matrix to store the output\r\n dt = X.dtype\r\n if output_pixel_vals:\r\n dt = 'uint8'\r\n out_array = numpy.zeros(out_shape, dtype=dt)\r\n\r\n for tile_row in xrange(tile_shape[0]):\r\n for tile_col in xrange(tile_shape[1]):\r\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\r\n this_x = X[tile_row * tile_shape[1] + tile_col]\r\n if scale_rows_to_unit_interval:\r\n # if we should scale values to be between 0 and 1\r\n # do this by calling the `scale_to_unit_interval`\r\n # function\r\n this_img = scale_to_unit_interval(\r\n this_x.reshape(img_shape))\r\n else:\r\n this_img = this_x.reshape(img_shape)\r\n # add the slice to the corresponding position in the\r\n # output array\r\n c = 1\r\n if output_pixel_vals:\r\n c = 255\r\n out_array[\r\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\r\n tile_col * (W + Ws): tile_col * (W + Ws) + W\r\n ] = this_img * c\r\n return out_array",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n\n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n\n # The expression below can be re-written in a more C style as\n # follows :\n #\n # out_shape = [0,0]\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\n # tile_spacing[0]\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\n # tile_spacing[1]\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n\n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output numpy ndarray to store the image\n if output_pixel_vals:\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8')\n else:\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype)\n\n #colors default to 0, alpha defaults to 1 (opaque)\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n\n for i in xrange(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = numpy.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n\n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n\n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = numpy.zeros(out_shape, dtype=dt)\n\n for tile_row in xrange(tile_shape[0]):\n for tile_col in xrange(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # function\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n \n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n \n # The expression below can be re-written in a more C style as\n # follows :\n #\n # out_shape = [0,0]\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\n # tile_spacing[0]\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\n # tile_spacing[1]\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n \n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output numpy ndarray to store the image\n # colors default to 0 (i.e. black), alphas defaults to 1 (fully opaque i.e.\n # corresponding pixel fully visible in image))\n if output_pixel_vals:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8') \n else:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype) \n\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n \n for i in range(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = np.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n \n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n \n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = np.ones(out_shape, dtype=dt)*255\n \n for tile_row in range(tile_shape[0]):\n for tile_col in range(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # function\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n\n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n\n # The expression below can be re-written in a more C style as\n # follows :\n #\n # out_shape = [0,0]\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\n # tile_spacing[0]\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\n # tile_spacing[1]\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n\n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output np ndarray to store the image\n if output_pixel_vals:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8')\n else:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype)\n\n # colors default to 0, alpha defaults to 1 (opaque)\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n\n for i in xrange(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = np.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n\n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n\n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = np.zeros(out_shape, dtype=dt)\n\n for tile_row in xrange(tile_shape[0]):\n for tile_col in xrange(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # functionmapping\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def stitch_map(tiles, width, height, bbox, dpi):\n size = (int(width * dpi_to_dpmm(dpi)), int(height * dpi_to_dpmm(dpi)))\n background = Image.new('RGBA', size, (255, 255, 255))\n for layer in tiles:\n layer_img = Image.new(\"RGBA\", size)\n for (x, y), tile_path in layer.items():\n tile = Image.open(tile_path)\n layer_img.paste(tile, ((x - bbox.min.x) * TILE_SIZE, (y - bbox.min.y) * TILE_SIZE))\n background = Image.alpha_composite(background, layer_img)\n add_scales_bar(background, bbox)\n return background.convert(\"RGB\")",
"def get_tiles(self) -> list:\n n_rows = self.mosaic_dimensions[0]\n n_columns = self.mosaic_dimensions[1]\n return [\n self.get_tile(i_row, i_column)\n for i_row in range(n_rows)\n for i_column in range(n_columns)\n ]",
"def slice(\n filename,\n number_tiles=None,\n col=None,\n row=None,\n save=True,\n DecompressionBombWarning=True,\n):\n if DecompressionBombWarning is False:\n Image.MAX_IMAGE_PIXELS = None\n\n im = Image.open(filename)\n im_w, im_h = im.size\n\n columns = 0\n rows = 0\n if number_tiles:\n validate_image(im, number_tiles)\n columns, rows = calc_columns_rows(number_tiles)\n else:\n validate_image_col_row(im, col, row)\n columns = col\n rows = row\n\n tile_w, tile_h = int(floor(im_w / columns)), int(floor(im_h / rows))\n\n tiles = []\n number = 1\n for pos_y in range(0, im_h - rows, tile_h): # -rows for rounding error.\n for pos_x in range(0, im_w - columns, tile_w): # as above.\n area = (pos_x, pos_y, pos_x + tile_w, pos_y + tile_h)\n image = im.crop(area)\n position = (int(floor(pos_x / tile_w)) + 1, int(floor(pos_y / tile_h)) + 1)\n coords = (pos_x, pos_y)\n tile = Tile(image, number, position, coords)\n tiles.append(tile)\n number += 1\n if save:\n save_tiles(\n tiles, prefix=get_basename(filename), directory=os.path.dirname(filename)\n )\n return tuple(tiles)",
"def readTiles(self):\n TileImage = Image.open(self.Filename).convert(\"RGB\")\n TileIW, TileIH = TileImage.size\n TilesetW, TilesetH = TileIW // self.TileWidth, TileIH // self.TileHeight\n\n for y in range(TilesetH):\n for x in range(TilesetW):\n box = self.TileWidth * x, self.TileHeight * y, self.TileWidth * (x+1), self.TileHeight * (y+1)\n tile = TileImage.crop(box)\n self.List.append(tile)\n\n str = tile.tostring()\n if not str in self.TileDict:\n #print(\"add tile: \", str)\n self.TileDict[str] = len(self.List) - 1\n print(\"tile count: {}, unique count: {}\".format(len(self.List),len(self.TileDict.values())))",
"def split_image_into_tiles_of_size(arr: Image, tile_w: int, tile_h: int, overlap: int):\n x_axis = -1\n y_axis = -2\n arr_width, arr_height = arr.shape[x_axis], arr.shape[y_axis]\n\n x_ntiles = (\n arr_width // tile_w if arr_width % tile_w == 0 else (arr_width // tile_w) + 1\n )\n y_ntiles = (\n arr_height // tile_h if arr_height % tile_h == 0 else (arr_height // tile_h) + 1\n )\n\n tiles = []\n\n # row\n for i in range(0, y_ntiles):\n # height of this tile\n ver_f = tile_h * i\n ver_t = ver_f + tile_h\n\n # col\n for j in range(0, x_ntiles):\n # width of this tile\n hor_f = tile_w * j\n hor_t = hor_f + tile_w\n\n tile = get_tile(arr, hor_f, hor_t, ver_f, ver_t, overlap)\n\n tiles.append(tile)\n tile_shape = [tile_h, tile_w]\n ntiles = dict(x=x_ntiles, y=y_ntiles)\n padding = dict(left=0, right=0, top=0, bottom=0)\n if arr_width % tile_w == 0:\n padding[\"right\"] = 0\n else:\n padding[\"right\"] = tile_w - (arr_width % tile_w)\n if arr_height % tile_h == 0:\n padding[\"bottom\"] = 0\n else:\n padding[\"bottom\"] = tile_h - (arr_height % tile_h)\n info = dict(tile_shape=tile_shape, ntiles=ntiles, overlap=overlap, padding=padding)\n return tiles, info",
"def get_tiles():\n\t\t\n\tcursor = get_cursor()\n\t\n\tcursor.execute(\"SELECT * FROM fitmeimages ORDER BY shade ASC, id ASC\")\n\treturn cursor.fetchall();",
"def test_unbounded_tileset_image(self):\n\t\t# Create an 8x6 tileset image placeholder\n\t\tself.expected_tile_width = 8\n\t\tself.expected_tile_height = 6\n\t\tself.expected_rows = self.expected_tile_height\n\t\tself.expected_cols = self.expected_tile_width\n\n\t\tself.test_image = dummy_image(self.expected_width(), self.expected_height())\n\t\tself.test_image_grid = TextureGrid(ImageGrid(self.test_image, self.expected_rows, self.expected_cols))\n\n\t\t# Test creating a TilesetImage without specifying dimensions\n\t\tself.tileset_image = TilesetImage(self.test_image)\n\n\t\tself.assert_tileset_image('Rows and columns not specified.')",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n\n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n\n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output numpy ndarray to store the image\n if output_pixel_vals:\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8')\n else:\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype)\n\n #colors default to 0, alpha defaults to 1 (opaque)\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n\n for i in xrange(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = numpy.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n\n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n\n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = numpy.zeros(out_shape, dtype=dt)\n\n for tile_row in xrange(tile_shape[0]):\n for tile_col in xrange(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # function\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def make_tiles(input_path, save_path, dimension):\n for filename in os.listdir(input_path):\n if filename.endswith(\".png\"):\n image_path = input_path + filename\n\n width, height = Image.open(image_path).size\n\n # Ensures image is square.\n assert width == height\n # Ensures the image can be cut into the desired dimensions.\n assert width % dimension == 0\n n_tiles = (width / dimension) ** 2\n\n tiles = image_slicer.slice(image_path, n_tiles, save=False)\n image_slicer.save_tiles(\n tiles, directory=save_path, prefix=filename[0:2], format=\"png\"\n )",
"def mbtiles(ctx, files, output, overwrite, title, description,\n layer_type, img_format, tile_size, zoom_levels, image_dump,\n num_workers, src_nodata, dst_nodata, resampling):\n output, files = resolve_inout(files=files, output=output,\n overwrite=overwrite)\n inputfile = files[0]\n\n logger = logging.getLogger('rio-mbtiles')\n\n with ctx.obj['env']:\n\n # Read metadata from the source dataset.\n with rasterio.open(inputfile) as src:\n\n validate_nodata(dst_nodata, src_nodata, src.profile.get('nodata'))\n base_kwds = {'dst_nodata': dst_nodata, 'src_nodata': src_nodata}\n\n if src_nodata is not None:\n base_kwds.update(nodata=src_nodata)\n\n if dst_nodata is not None:\n base_kwds.update(nodata=dst_nodata)\n\n # Name and description.\n title = title or os.path.basename(src.name)\n description = description or src.name\n\n # Compute the geographic bounding box of the dataset.\n (west, east), (south, north) = transform(\n src.crs, 'EPSG:4326', src.bounds[::2], src.bounds[1::2])\n\n # Resolve the minimum and maximum zoom levels for export.\n if zoom_levels:\n minzoom, maxzoom = map(int, zoom_levels.split('..'))\n else:\n zw = int(round(math.log(360.0 / (east - west), 2.0)))\n zh = int(round(math.log(170.1022 / (north - south), 2.0)))\n minzoom = min(zw, zh)\n maxzoom = max(zw, zh)\n\n logger.debug(\"Zoom range: %d..%d\", minzoom, maxzoom)\n\n # Parameters for creation of tile images.\n base_kwds.update({\n 'driver': img_format.upper(),\n 'dtype': 'uint8',\n 'nodata': 0,\n 'height': tile_size,\n 'width': tile_size,\n 'count': 3,\n 'crs': TILES_CRS})\n\n img_ext = 'jpg' if img_format.lower() == 'jpeg' else 'png'\n\n # Initialize the sqlite db.\n if os.path.exists(output):\n os.unlink(output)\n # workaround for bug here: https://bugs.python.org/issue27126\n sqlite3.connect(':memory:').close()\n\n conn = sqlite3.connect(output)\n cur = conn.cursor()\n cur.execute(\n \"CREATE TABLE tiles \"\n \"(zoom_level integer, tile_column integer, \"\n \"tile_row integer, tile_data blob);\")\n cur.execute(\n \"CREATE TABLE metadata (name text, value text);\")\n\n # Insert mbtiles metadata into db.\n cur.execute(\n \"INSERT INTO metadata (name, value) VALUES (?, ?);\",\n (\"name\", title))\n cur.execute(\n \"INSERT INTO metadata (name, value) VALUES (?, ?);\",\n (\"type\", layer_type))\n cur.execute(\n \"INSERT INTO metadata (name, value) VALUES (?, ?);\",\n (\"version\", \"1.1\"))\n cur.execute(\n \"INSERT INTO metadata (name, value) VALUES (?, ?);\",\n (\"description\", description))\n cur.execute(\n \"INSERT INTO metadata (name, value) VALUES (?, ?);\",\n (\"format\", img_ext))\n cur.execute(\n \"INSERT INTO metadata (name, value) VALUES (?, ?);\",\n (\"bounds\", \"%f,%f,%f,%f\" % (west, south, east, north)))\n\n conn.commit()\n\n # Create a pool of workers to process tile tasks.\n pool = Pool(num_workers, init_worker,\n (inputfile, base_kwds, resampling), 100)\n\n # Constrain bounds.\n EPS = 1.0e-10\n west = max(-180 + EPS, west)\n south = max(-85.051129, south)\n east = min(180 - EPS, east)\n north = min(85.051129, north)\n\n # Initialize iterator over output tiles.\n tiles = mercantile.tiles(\n west, south, east, north, range(minzoom, maxzoom + 1))\n\n for tile, contents in pool.imap_unordered(process_tile, tiles):\n\n if contents is None:\n logger.info(\"Tile %r is empty and will be skipped\", tile)\n continue\n\n # MBTiles has a different origin than Mercantile/tilebelt.\n tiley = int(math.pow(2, tile.z)) - tile.y - 1\n\n # Optional image dump.\n if image_dump:\n img_name = '%d-%d-%d.%s' % (\n tile.x, 
tiley, tile.z, img_ext)\n img_path = os.path.join(image_dump, img_name)\n with open(img_path, 'wb') as img:\n img.write(contents)\n\n # Insert tile into db.\n cur.execute(\n \"INSERT INTO tiles \"\n \"(zoom_level, tile_column, tile_row, tile_data) \"\n \"VALUES (?, ?, ?, ?);\",\n (tile.z, tile.x, tiley, buffer(contents)))\n\n conn.commit()\n\n conn.close()\n # Done!",
"def _generate_images(self, trace):\n images = []\n colors = []\n colors_by_shape = {}\n for board in trace:\n width = int(round((float(board.shape[1]) / board.shape[0]) * self._height))\n cellsize = width / board.shape[1] # cell size\n img = np.zeros((self._height, width, 3), dtype=np.uint8)\n\n tiles = {} # map from integer rep. of the tile to a shape\n for y in range(board.shape[0]):\n for x in range(board.shape[1]):\n cell = board[y,x]\n if cell not in tiles:\n tiles[cell] = (x, y, 1, 1) # x, y, w, h\n else:\n cur_x, cur_y, cur_w, cur_h = tiles[cell]\n if x >= cur_x + cur_w:\n cur_w = (x-cur_x) + 1\n if y >= cur_y + cur_h:\n cur_h = (y-cur_y) + 1\n tiles[cell] = (cur_x, cur_y, cur_w, cur_h)\n\n # Colors\n if len(colors_by_shape) == 0:\n for tid in tiles:\n shape = (tiles[tid][2], tiles[tid][3])\n if shape not in colors_by_shape:\n colors_by_shape[shape] = hex_to_rgb(random_unique_color(colors))\n colors.append(colors_by_shape[shape])\n\n for tid in tiles:\n x, y, w, h = tiles[tid]\n shape = (w,h)\n empty = board[y,x] == 0\n x, y, w, h = x*cellsize, y*cellsize, w*cellsize, h*cellsize\n # Draw a filled rectangle without color\n if not empty:\n cv2.rectangle(img, (x, y), (x+w, y+h), colors_by_shape[shape],-1)\n else:\n cv2.rectangle(img, (x, y), (x+w, y+h), [0,0,0], -1) #, 8)-\n # Draw a boundary\n cv2.rectangle(img, (x, y), (x+w, y+h), (0, 0, 0), 2, 8)\n \n images.append(img)\n return images",
"def build_tiles(img,tilefile,tilesize,options=[]):\n\tlevels=ceil(log(max(img.get_xsize(),img.get_ysize())/tilesize)/log(2.0))\n\t\n\ttf=file(tilefile,\"w\")\n\t\n\ttile_dict={}\n\tpos=0\n\timg2=img.copy()\n\txs,ys=img2.get_xsize(),img2.get_ysize()\n\tfor l in range(int(levels)):\n\t\trmin=img2.get_attr(\"mean\")-img2.get_attr(\"sigma\")*3.0\n\t\trmax=img2.get_attr(\"mean\")+img2.get_attr(\"sigma\")*3.0\n\t\tfor x in range(0,img2.get_xsize(),tilesize):\n\t\t\tfor y in range(0,img2.get_ysize(),tilesize):\n\t\t\t\ti=img2.get_clip(Region(x,y,tilesize,tilesize))\n\t\t\t\ti.set_attr(\"render_min\",rmin)\n\t\t\t\ti.set_attr(\"render_max\",rmax)\n\t\t\t\ti.set_attr(\"jpeg_quality\",70)\n\t\t\t\tfsp=\"tmpimg.%d.%03d.%03d.jpg\"%(l,x/tilesize,y/tilesize)\n\t\t\t\ti.write_image(fsp)\n\t\t\t\tsz=os.stat(fsp).st_size\n\t\t\t\ttile_dict[(l,x/tilesize,y/tilesize)]=(pos,sz)\n\t\t\t\tpos+=sz\n\t\timg2.process_inplace(\"math.meanshrink\",{\"n\":2})\n\t\n\t# This will produce 2 power spectrum images in the tile file\n\t# with scale factors -1 and -2\n\tif \"pspec\" in options :\n\t\tnx,ny=img.get_xsize()/512,img.get_ysize()/512\n\t\ta=EMData()\n\t\ta.set_size(512,512)\n\t\tif (ny>2 and nx>2) :\n\t\t\tfor y in range(1,ny-1):\n\t\t\t\tfor x in range(1,nx-1):\n\t\t\t\t\tc=img.get_clip(Region(x*512,y*512,512,512))\n\t\t\t\t\tc.process_inplace(\"normalize\")\n\t\t\t\t\tc.process_inplace(\"math.realtofft\")\n\t\t\t\t\tc.process_inplace(\"math.squared\")\n\t\t\t\t\ta+=c\n\t\t\ta.set_value_at(256,256,0,.01)\n\t\t\ta-=a.get_attr(\"minimum\")-a.get_attr(\"sigma\")*.01\n\t\t\ta.process_inplace(\"math.log\")\n\t\t\ta-=a.get_attr(\"minimum\")\n\t\t\ta.set_attr(\"render_min\",a.get_attr(\"minimum\")-a.get_attr(\"sigma\")*.1)\n\t\t\ta.set_attr(\"render_max\",a.get_attr(\"mean\")+a.get_attr(\"sigma\")*4.0)\n\t\t\ta.set_attr(\"jepg_quality\",80)\n\t\t\ta.write_image(\"/tmp/tmpimg.mrc\")\n\t\t\tfsp=\"tmpimg.jpg\"\n\t\t\ta.write_image(fsp)\n\t\t\tsz=os.stat(fsp).st_size\n\t\t\ttile_dict[(-1,0,0)]=(pos,sz)\n\t\t\tpos+=sz\n\t\n#\t\ttry:\n\t\t\timport matplotlib\n\t\t\tmatplotlib.use('Agg')\n\t\t\timport pylab\n\t\t\tmanager = pylab.get_current_fig_manager()\n\t\t\tapix=options[\"pspec\"]\n\t\t\tdx=1.0/(2.0*apix*256.0)\n\t\t\tx=pylab.arange(dx,dx*255.9,dx)\n\t\t\ty=a.calc_radial_dist(255,1,1,0)\t# radial power spectrum (log)\n\t\t\tpylab.figure(figsize=(8,6),dpi=96)\n\t\t\tpylab.axes([.08,.08,.9,.9], axisbg='w')\n\t\t\tpylab.plot(x,y)\n\t\t\tpylab.axis([0,dx*256,min(y),max(y)])\n\t\t\tpylab.xlabel(\"Spatial Freq. (1/A)\")\n\t\t\tpylab.ylabel(\"Log Intensity (10^x)\")\n#\t\t\tprint y\n\t\t\t\n\t\t\tfsp=\"tmpimg2.png\"\n\t\t\tpylab.savefig(fsp,dpi=96)\n\t\t\tsz=os.stat(fsp).st_size\n\t\t\ttile_dict[(-2,0,0)]=(pos,sz)\n\t\t\tpos+=sz\n\n#\t\texcept:\n#\t\t\tprint \"Unable to generate plot (need matplotlib)\"\n\t\t\t\n\t\n\tpickle.dump(tile_dict,tf)\n\t\n\tfor l in range(int(levels)):\n\t\tfor x in range(0,xs,tilesize):\n\t\t\tfor y in range(0,ys,tilesize):\n\t\t\t\tfsp=\"tmpimg.%d.%03d.%03d.jpg\"%(l,x/tilesize,y/tilesize)\n\t\t\t\ta=file(fsp,\"r\")\n\t\t\t\tb=a.read()\n\t\t\t\ta.close()\n\t\t\t\ttf.write(b)\n\t\t\t\tos.remove(fsp)\n\t\txs/=2\n\t\tys/=2\n\t\n\tif \"pspec\" in options :\n\t\tfor fsp in [\"tmpimg.jpg\",\"tmpimg2.png\"] :\n\t\t\ta=file(fsp,\"r\")\n\t\t\tb=a.read()\n\t\t\ta.close()\n\t\t\ttf.write(b)\n#\t\t\tos.remove(fsp)\n\t\n\ttf.close()",
"def __init__(self, width, height):\n\t\tself.width = width\n\t\tself.height = height\n\t\tself.numTiles = width*height\n\t\tself.tiles = []\n\t\tfor i in range(0, width):\n\t\t\tfor j in range(0, height):\n\t\t\t\tself.tiles.append(Tile(i, j))",
"def MakeCoordPlot(tiles, coords, image_size=10000, boarder_width=20):\n tile_size = tiles.shape[1]\n\n grid_coords = Cloud2Grid(\n coords, grid_dim=(image_size - 2 * tile_size), tile_size=tile_size\n )\n grid_coords = grid_coords + tile_size # for black boarder\n grid_image = Image.new(\"RGB\", (image_size, image_size))\n for i in range(len(tiles)): # paste each tile onto image\n tile = ColorTileBoarder(tiles[i], channel=0, boarder_width=2)\n tile = Image.fromarray(tiles[i])\n x, y = grid_coords[i, :]\n grid_image.paste(tile, (int(x), int(y)))\n coords[\"grid1\"] = grid_coords[:, 0] + tile_size // 2\n coords[\"grid2\"] = grid_coords[:, 1] + tile_size // 2\n return grid_image, coords"
] | [
"0.6996957",
"0.6695184",
"0.63886476",
"0.62590015",
"0.6253077",
"0.6211737",
"0.62029433",
"0.60706353",
"0.6000102",
"0.596734",
"0.5926167",
"0.5918759",
"0.5894937",
"0.5867754",
"0.5861465",
"0.5857397",
"0.5848029",
"0.5744577",
"0.5702461",
"0.5656095",
"0.56503665",
"0.56347495",
"0.56256145",
"0.5623565",
"0.56085813",
"0.5607058",
"0.55955684",
"0.55894506",
"0.55823416",
"0.55605465"
] | 0.7703681 | 0 |
Determine column and row position for filename. | def get_image_column_row(filename):
row, column = os.path.splitext(filename)[0][-5:].split("_")
return (int(column) - 1, int(row) - 1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_position(self): # maybe encoded in filepath at some point\n result = (self.iter * self.row_step)% self.row_size, self.iter // (self.row_size * self.row_step)* self.col_step\n self.iter += 1\n return result",
"def ind(self, pos):\n row = int(pos[1:]) - 1\n column = self.letter_to_column(pos[0])\n return row, column",
"def ind(self, pos):\n row = int(pos[1:]) - 1\n column = self.letter_to_column(pos[0])\n return row, column",
"def ind(self, pos):\n row = int(pos[1:]) - 1\n column = self.letter_to_column(pos[0])\n return row, column",
"def position(file_, pattern):\n pattern = pattern[1:-1]\n pattern = pattern.replace('(', '\\(')\n pattern = pattern.replace(')', '\\)')\n file_obj = open(file_, 'rU')\n for line_number, line in enumerate(file_obj):\n m = re.search(pattern, line)\n if m is not None:\n return line_number, m.pos\n file_obj.close()\n return 0, 0",
"def get_position(self, number):\n for rowidx, row in enumerate(self.numbers):\n for colidx, num in enumerate(row):\n if num == number:\n return rowidx, colidx",
"def extract_row_and_col_number(self, entry):\n\n row_col_string = entry.split(\"_\")\n row = int(row_col_string[0][1:])\n col = int(row_col_string[1][1:])\n return row, col",
"def column_index(input_file, name):\n col, com = find_columns(input_file)\n col_name = name\n contents = open(input_file, 'r').readlines()\n for line in contents:\n if com[col.index(col_name)] in line:\n line_index = contents.index(line)+1\n return line_index",
"def position(self) -> Tuple[int, int]:\n return self.row, self.col",
"def _errpos(self, fpos):\r\n filename, string = self._includestack[-1]\r\n return filename, srow(string, fpos), scol(string, fpos)",
"def get_position(filestring, position):\n lines = filestring.split(\"\\n\")\n line_number, place, count = 0, 0, 0\n #print \"Number of lines: \", len(lines)\n \n while line_number < len(lines):\n line = lines[line_number]\n new_count = count + len(line) #+ 1 # +1 nes dar newline pridedame\n if position <= new_count:\n place = position - count\n break\n count = new_count # +1 nes dar newline pridedame\n line_number += 1\n \n print \"\\n\".join([\"%s:%s\" % ((\"===> \" if i==line_number else \"\") + str(i), lines[i]) for i in xrange(len(lines))])\n return (line_number, place)",
"def get_position(self):\n\n return (self._fileobj.tell() - self._pos) * 8 - self._bits",
"def part2(filename: str) -> int:\n data = parse(filename)\n return index_of_floor(data, -1)",
"def get_coords(self) -> Tuple[int]:\r\n return self.file, self.rank",
"def get_pos_index(self):\n return [self.row-1, self.col-1]",
"def count_positions(fname):\r\n with open(fname) as f:\r\n for i, l in enumerate(f):\r\n pass\r\n return i + 1",
"def pos(self):\n return self.file.tell()",
"def get_current_position(self) -> Tuple[int, int]:\n return self.__row_position, self.__col_position",
"def cursor_coordinates(self):\n text = self.getText()\n lines = text.split(\"\\n\")\n pos = self.getCursorPos()\n if pos == 0:\n return (0, 0)\n i = 0\n cursor_row = -1\n cursor_col = -1\n for row, line in enumerate(lines):\n i += len(line) + 1 # we need to include \"\\n\"\n if pos < i:\n cursor_row = row\n cursor_col = pos - i + len(line) + 1\n break\n return (cursor_col, cursor_row)",
"def _get_header_position(header_row: List[str], column_title: str) -> int:\n for pos, column in enumerate(header_row):\n if column_title.lower() in column.lower():\n return pos\n\n raise Exception(\"Expected column header not found for {}\".format(column_title))",
"def get_column(filename, column_name):\n with open(filename) as f:\n for header in f:\n columns = header.rstrip().split(\"\\t\")\n return columns.index(column_name)",
"def _get_file_info(filename):\n filename = os.path.split(filename)[-1]\n filename = filename[:str.rfind(filename, '.jsonl.gz')]\n _, mode, idx = filename.split('_')\n return mode, idx",
"def get_row_col_number(self, index):\n row_num = index // self.spatial_cols\n col_num = index % self.spatial_cols\n return row_num, col_num",
"def xFileInfo(filename):\n delim = getDelimiter(filename)\n f = open(filename, 'r')\n reader = csv.reader(f, delimiter=delim)\n num_rows = 0\n for (row_i, row) in enumerate(reader):\n if row_i == 0: #ignore empty strings (e.g. at end of row)\n num_cols = len([val for val in row if val])\n num_rows += 1\n f.close()\n return (num_rows, num_cols)",
"def line_col_to_offset(self,\n snapshot: Bug,\n filepath: str,\n line_num: int,\n col_num: int\n ) -> int:\n assert line_num > 0\n assert col_num >= 0\n line_col_s = \"%s/%s[%d:%d]\".format(snapshot.name,\n filepath,\n line_num,\n col_num)\n logger.debug(\"Transforming line-column, '%s', into a character offset\", # noqa: pycodestyle\n line_col_s)\n line_offsets = self._line_offsets(snapshot, filepath)\n line_starts_at = line_offsets[line_num - 1]\n offset = line_starts_at + col_num\n logger.debug(\"Transformed line-column, '%s', into character offset: %s\", # noqa: pycodestyle\n line_col_s,\n offset)\n return offset",
"def _get_column_offset(self, lnum, colnum, **opts):\n start, end = self._get_linespan(lnum)\n length = end - start\n cpos = self._col2pos(start, colnum, **opts)\n if cpos < 0 or cpos >= length:\n raise IndexError(\"column out of bounds\")\n\n return start + cpos",
"def index_to_position(self, index):\n col = index % self._grid_size\n row = index // self._grid_size\n return row, col",
"def find_column(text, index):\n\n last_cr = text.rfind(\"\\n\", 0, index)\n if last_cr < 0:\n last_cr = 0\n column = (index - last_cr) + 1\n return column",
"def line_offsets(fname):\n line_offset = []\n offset = 0\n for _, line in enumerate( open(fname) ):\n line_offset.append(offset)\n offset += len(line)\n return line_offset",
"def filename_line(skip: int = 2) -> Tuple[str, int]:\n stack = inspect.stack()\n start = skip\n parentframe = stack[start][0]\n\n filename = 'N/A'\n module = inspect.getmodule(parentframe)\n if module:\n filename = os.path.basename(os.path.realpath(module.__file__))\n\n return filename, parentframe.f_lineno"
] | [
"0.6791335",
"0.66289556",
"0.66289556",
"0.66289556",
"0.6522248",
"0.6411789",
"0.63911855",
"0.63667697",
"0.6281292",
"0.62766576",
"0.6266177",
"0.6249092",
"0.6142638",
"0.61390686",
"0.6129921",
"0.60795605",
"0.60582995",
"0.60221326",
"0.6021388",
"0.60078406",
"0.5974924",
"0.59706306",
"0.59696424",
"0.5965513",
"0.59258085",
"0.590232",
"0.58866423",
"0.58864385",
"0.586704",
"0.585859"
] | 0.75118536 | 0 |
Open all images in a directory. Return tuple of Tile instances. | def open_images_in(directory):
files = [
filename
for filename in os.listdir(directory)
if "_" in filename and not filename.startswith("joined")
]
tiles = []
if len(files) > 0:
i = 0
for file in files:
pos = get_image_column_row(file)
im = Image.open(os.path.join(directory, file))
position_xy = [0, 0]
count = 0
for a, b in zip(pos, im.size):
position_xy[count] = a * b
count = count + 1
tiles.append(
Tile(
image=im,
position=pos,
number=i + 1,
coords=position_xy,
filename=file,
)
)
i = i + 1
return tiles | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_pic_in_directory(directory):\n return [Image.open(os.path.join(directory, img)) for img in os.listdir(directory)]",
"def get_images(directory=None): #import from mask.py\n \n if directory == None:\n directory = os.getcwd() # Use working directory if unspecified\n \n image_list = [] # Initialize aggregaotrs\n file_list = []\n \n directory_list = os.listdir(directory) # Get list of files\n for entry in directory_list:\n absolute_filename = os.path.join(directory, entry)\n try:\n image = PIL.Image.open(absolute_filename)\n file_list += [entry]\n image_list += [image]\n except IOError:\n pass # do nothing with errors tying to open non-images\n return image_list, file_list",
"def get_images(directory=None):\n \n if directory == None:\n directory = os.getcwd() # Use working directory if unspecified\n \n image_list = [] # Initialize aggregaotrs\n file_list = []\n \n directory_list = os.listdir(directory) # Get list of files\n for entry in directory_list:\n absolute_filename = os.path.join(directory, entry)\n try:\n image = PIL.Image.open(absolute_filename)\n file_list += [entry]\n image_list += [image]\n except IOError:\n pass # do nothing with errors tying to open non-images\n return image_list, file_list",
"def get_images(directory=None):\r\n \r\n if directory == None:\r\n directory = os.getcwd() # Use working directory if unspecified\r\n \r\n image_list = [] # Initialize aggregaotrs\r\n file_list = []\r\n \r\n directory_list = os.listdir(directory) # Get list of files\r\n for entry in directory_list:\r\n if len(file_list)<2:\r\n absolute_filename = os.path.join(directory, entry)\r\n try:\r\n image = PIL.Image.open(absolute_filename)\r\n file_list += [entry]\r\n image_list += [image]\r\n except IOError:\r\n pass # do nothing with errors tying to open non-images\r\n return image_list, file_list",
"def load_images(self, folder):\n cwd = os.getcwd()\n dir = cwd + '/' + folder\n files = os.listdir(dir)\n for file in files:\n img = pygame.image.load(dir + '/' + file)\n self.images.append(img)",
"def load_images_from_folder(folder):\n images = []\n for filename in os.listdir(folder):\n img = Image.open(os.path.join(folder,filename))\n images.append(img)\n return images",
"def open(*args, **kwargs):\n return MultiFileTileSource(*args, **kwargs)",
"def image_iter() -> iter:\r\n return ('Images/' + image for image in IMAGES)",
"def load_images(self, image_paths):\n \n fill_list = []\n \n for idx in tqdm(range(len(image_paths))):\n path = image_paths[idx]\n yield cv2.imread(path)",
"def readTiles(self):\n TileImage = Image.open(self.Filename).convert(\"RGB\")\n TileIW, TileIH = TileImage.size\n TilesetW, TilesetH = TileIW // self.TileWidth, TileIH // self.TileHeight\n\n for y in range(TilesetH):\n for x in range(TilesetW):\n box = self.TileWidth * x, self.TileHeight * y, self.TileWidth * (x+1), self.TileHeight * (y+1)\n tile = TileImage.crop(box)\n self.List.append(tile)\n\n str = tile.tostring()\n if not str in self.TileDict:\n #print(\"add tile: \", str)\n self.TileDict[str] = len(self.List) - 1\n print(\"tile count: {}, unique count: {}\".format(len(self.List),len(self.TileDict.values())))",
"def get_tiles():\n\t\t\n\tcursor = get_cursor()\n\t\n\tcursor.execute(\"SELECT * FROM fitmeimages ORDER BY shade ASC, id ASC\")\n\treturn cursor.fetchall();",
"def get_images(self, file_path: str) -> Iterable[Image]:\n return []",
"def get_existing_images(directory):\n validate_directory(directory)\n directory += '/'\n try:\n return listdir(directory)\n except:\n mkdir(directory)\n return []",
"def load_images_from_directory(input_dir, batch_shape):\n def input_filenames(input_dir):\n all_files = tf.gfile.Glob(os.path.join(input_dir, '*.png'))\n all_files.sort()\n return all_files\n\n\n images = np.zeros(batch_shape)\n filenames = []\n idx = 0\n batch_size = batch_shape[0]\n\n for filepath in input_filenames(input_dir):\n with tf.gfile.Open(filepath, mode='rb') as f:\n image = imread(f, mode='RGB').astype(np.float) / 255.0\n\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx, :, :, :] = image * 2.0 - 1.0\n filenames.append(os.path.basename(filepath))\n\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n\n # This is a partial batch left over at end.\n # Note that images will still have the proper size.\n if idx > 0:\n yield filenames, images",
"def getImages(path):\n files = list()\n\n for f in listdir(path):\n file = join(path, f)\n if isfile(file):\n files.append(getImage(file))\n\n return files",
"def im_open(path):\n\n try:\n assert os.path.isdir(path)\n #get file list in directory - glob includes full path\n files = sorted(glob.glob('{}{}*'.format(path,os.sep)), key=sort_key) \n #load the collection\n raw_stack = io.imread_collection(files)\n #turn the collection into a np array and remove extraneous OCT portion from 1025:1083 on x axis. (z,y,x)\n #if .bmp files are open (from pv-oct), the slicing will not affect them, the x-axis is only 540 pixels.\n stack = io.collection.concatenate_images(raw_stack)[:,:,0:1024]\n \n return stack\n\n except AssertionError:\n sys.exit(\"A non-directory object was given to the __open__ function\")",
"def open(*args, **kwargs):\n return TiffFileTileSource(*args, **kwargs)",
"def readImages(image_dir):\n extensions = ['bmp', 'pbm', 'pgm', 'ppm', 'sr', 'ras', 'jpeg',\n 'jpg', 'jpe', 'jp2', 'tiff', 'tif', 'png']\n\n search_paths = [os.path.join(image_dir, '*.' + ext) for ext in extensions]\n image_files = sorted(sum(map(glob, search_paths), []))\n images = [cv2.imread(f, cv2.IMREAD_UNCHANGED | cv2.IMREAD_COLOR) for f in image_files]\n\n bad_read = any([img is None for img in images])\n if bad_read:\n raise RuntimeError(\n \"Reading one or more files in {} failed - aborting.\"\n .format(image_dir))\n\n return images",
"def load_images(self):\n for image in self.gltf.images:\n self.images.append(image.load(self.path.parent))",
"def load_images(input_dir, batch_shape):\n images = np.zeros(batch_shape)\n filenames = []\n idx = 0\n batch_size = batch_shape[0]\n for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):\n with tf.gfile.Open(filepath) as f:\n image = imread(f, mode='RGB').astype(np.float) / 255.0\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx, :, :, :] = image * 2.0 - 1.0\n filenames.append(os.path.basename(filepath))\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n if idx > 0:\n yield filenames, images",
"def load_images(input_dir, batch_shape):\n images = np.zeros(batch_shape)\n filenames = []\n idx = 0\n batch_size = batch_shape[0]\n for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):\n with tf.gfile.Open(filepath) as f:\n image = np.array(Image.open(f).convert('RGB')).astype(np.float) / 1.0\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx, :, :, :] = image\n filenames.append(os.path.basename(filepath))\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n if idx > 0:\n yield filenames, images",
"def read_local(path):\n files = os.listdir(path)\n imgs = []\n for f in files:\n if f.endswith(\".tiff\") or f.endswith(\".tif\"):\n img = Image.open(os.path.join(path, f))\n imgs.append(np.array(img))\n return imgs",
"def load_images(input_dir, batch_shape):\n images = np.zeros(batch_shape)\n filenames = []\n idx = 0\n batch_size = batch_shape[0]\n # all_files = tf.gfile.Glob(os.path.join(input_dir, '*.png'))\n # test_files = [all_files[idx] for x in np.random.choice(len(all_files), 200, replace=False)]\n # for filepath in test_files:\n for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):\n with tf.gfile.Open(filepath) as f:\n image = imread(f, mode='RGB').astype(np.float) / 255.0\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx, :, :, :] = image * 2.0 - 1.0\n filenames.append(os.path.basename(filepath))\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n if idx > 0:\n yield filenames, images",
"def load_images(input_dir, batch_shape):\n images = np.zeros(batch_shape)\n filenames = []\n idx = 0\n batch_size = batch_shape[0]\n for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):\n with tf.gfile.Open(filepath) as f:\n image = imread(f, mode='RGB').astype(np.float) / 255.0\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx] = image * 2.0 - 1.0\n filenames.append(os.path.basename(filepath))\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n if idx > 0:\n yield filenames, images",
"def load_images(input_dir, batch_shape):\n images = np.zeros(batch_shape)\n filenames = []\n idx = 0\n batch_size = batch_shape[0]\n for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):\n with tf.gfile.Open(filepath) as f:\n image = np.array(Image.open(f).convert('RGB')).astype(np.float) / 255.0\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx] = image * 2.0 - 1.0\n filenames.append(os.path.basename(filepath))\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n if idx > 0:\n yield filenames, images",
"def load_sprites(dir=\"/home/robin/workspace/python/ipt/chess/sprites\"):\n arr = []\n chdir(dir)\n for i in range(12):\n img = mimg.imread(\"sprite_\"+\"{:0>2d}\".format(i)+\".png\")\n arr.append(img)\n return arr",
"def load_images(self, files, sub_dir):\n\n for f in files:\n self.images.append(Image(f, sub_dir))",
"def load_images(pool, entries):\n start = time.perf_counter()\n images = pool.map(ski.io.imread, [x.path for x in entries])\n logger.info(\"Loaded %i images:\", len(images))\n util.pprint_log([x.name for x in entries], logger.info)\n logger.info(util.elapsed(start))\n logger.info(\"\\n\")\n return images",
"def load_images(folder_path, num_images):\n imgs = np.zeros(shape=[num_images, 400, 400, 3])\n for i in range(1, num_images + 1):\n image_name = \"satImage_%.3d\" % i\n image_path = folder_path + image_name + \".png\"\n if os.path.isfile(image_path):\n print('Loading ' + image_path)\n img = mpimg.imread(image_path)\n\n #imgs[i - 1] = np.asarray(img).reshape(400, 400, 3)\n imgs[i - 1] = img.reshape(400, 400, 3)\n else:\n print('File ' + image_path + ' does not exist')\n return imgs",
"def load_images(input_dir, batch_shape):\n images = np.zeros(batch_shape)\n filenames = []\n existing_dirs = [os.path.basename(dir) for dir in os.listdir(FLAGS.output_dir)]\n idx = 0\n batch_size = batch_shape[0]\n for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.JPEG')):\n with tf.gfile.Open(filepath, 'rb') as f:\n image = np.array(Image.open(f).resize([FLAGS.image_height, FLAGS.image_width]).convert('RGB')).astype(np.float) / 255.0\n # Images for inception classifier are normalized to be in [-1, 1] interval.\n images[idx, :, :, :] = image * 2.0 - 1.0\n if os.path.basename(os.path.normpath(input_dir))=='*':\n head, tail = os.path.split(filepath)\n dirname=os.path.basename(head)\n if dirname in existing_dirs:\n continue\n filename = os.path.join(dirname, tail)\n else:\n filename = os.path.basename(filepath)\n filenames.append(filename)\n idx += 1\n if idx == batch_size:\n yield filenames, images\n filenames = []\n images = np.zeros(batch_shape)\n idx = 0\n if idx > 0:\n yield filenames, images"
] | [
"0.67336595",
"0.6599813",
"0.6416653",
"0.63725936",
"0.61642754",
"0.6014441",
"0.5978408",
"0.59570676",
"0.5940081",
"0.5896447",
"0.58877945",
"0.5837525",
"0.5792128",
"0.5787893",
"0.5751572",
"0.57426775",
"0.5739392",
"0.57090163",
"0.57005966",
"0.56994253",
"0.56881416",
"0.5686692",
"0.5685227",
"0.568453",
"0.5681492",
"0.568129",
"0.56698793",
"0.5639855",
"0.5638527",
"0.56049585"
] | 0.81790555 | 0 |
If a resource has a title, it should be included in the string representation. | def test_str_with_title(media_resource_factory):
resource = media_resource_factory(title="Test Resource")
assert str(resource) == f"{resource.id} ({resource.title})" | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def resource_link_title(self):\n return self.request.POST.get(\"resource_link_title\", self.resource_link_id)",
"def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")",
"def res_title(self):\n return self.get(\"res_title\", default=None, decode=True)",
"def short_title(self):\n if hasattr(self, \"title\"):\n return self.title\n else:\n return \"\"",
"def __str__(self):\n return \"{title}\".format(title=self.title)",
"def get_title(self):\n if not hasattr(self, '_title'):\n self._title = 'NO TITLE'\n if self._title:\n title = _(self._title)\n title = title.replace('&', '&') \n title = title.replace('\"', '"')\n return title\n else:\n return u''",
"def get_title_repr(self) -> str:\n try:\n return Title[self.title].value\n except (KeyError, ValueError):\n pass",
"def __str__(self):\n return str(self.title)",
"def inclusive_title(self):\n return self.title + (\" %s\" % (self.episode_to_string(self.latest_season, self.latest_episode),) if self.is_series() else \"\")",
"def title_string(self):\n return ' '.join(self.title).replace(' - ', '')",
"def __str__(self):\n \n return self.title",
"def format_title(self, data):\n return data",
"def safe_title(self):\n try:\n return self.title\n except ObjectDoesNotExist:\n return None",
"def title(self, obj):\n return str(obj)",
"def test_str_no_title(media_resource_factory):\n resource = media_resource_factory()\n\n assert str(resource) == str(resource.id)",
"def get_title(self) -> str:\n pass",
"def get_title():",
"def name_with_title(self):\n return \"%s %s\" % (self.title, self.name)",
"def complete_alt_title(self, obj):\n return str(obj)",
"def __str__(self):\n\t\treturn self.title",
"def __str__(self):\n\t\treturn self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title",
"def __str__(self):\n return self.title"
] | [
"0.68804574",
"0.6865952",
"0.6629121",
"0.6599802",
"0.6590698",
"0.6551943",
"0.6400065",
"0.6386993",
"0.6346398",
"0.632626",
"0.63220584",
"0.63117653",
"0.6293808",
"0.62767816",
"0.6276447",
"0.62574726",
"0.6230404",
"0.61885935",
"0.6185459",
"0.61847913",
"0.61847913",
"0.6176147",
"0.6176147",
"0.6176147",
"0.6176147",
"0.6176147",
"0.6176147",
"0.6176147",
"0.6176147",
"0.6176147"
] | 0.7142992 | 0 |
Media resources should be ordered by creation time, ascending. | def test_ordering(media_resource_factory):
m1 = media_resource_factory()
m2 = media_resource_factory()
assert list(models.MediaResource.objects.all()) == [m1, m2] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_sorted_img_list():\n dirPath=settings.BASE_DIR\n imgdir=\"/pttWeb/static/topicmodel\"\n fileID=glob.glob(dirPath+imgdir+\"/*.png\")\n fileID=[i.replace('/home/stream/Documents/minimum_django/pttWeb/static/','') for i in fileID]\n fileID=[Week_Image(i) for i in fileID]\n fileID.sort(key=lambda x: x.date, reverse=True)\n #translate . to / since javascript parsing date has some issue!\n fileID=[(i.filename,date_trans_z(i.date.strftime(\"%Y.%m.%d\"))) for i in fileID]\n return fileID",
"def filter(self):\n for f in FileHelper.ALL_PATHS:\n media_obj = MediaObject(FileHelper.get_url(f), FileHelper.get_title(f), FileHelper.get_media_type(f), FileHelper.get_icon(f), FileHelper.get_duration(f), FileHelper.get_ctype(f))\n _id = media_obj.uuid\n if media_obj.media_type == \"image\":\n DB.IMAGES[_id] = media_obj\n elif media_obj.media_type == \"audio\":\n DB.MUSIC[_id] = media_obj\n elif media_obj.media_type == \"video\":\n DB.VIDEOS[_id] = media_obj\n else:\n print \"File '%s' doesn't play nice.\" % (f)",
"def __get_sorted_file_list(self):\n d = self.__view.CurrentImgDir\n list = os.listdir(d)\n if self.__view.SortType == constant.THUMB_SORT_FILENAME:\n # Sort by Name\n list.sort()\n if self.__view.SortType == 2:\n # Sort by Size\n list.sort(lambda a, b: int(os.stat(os.path.join(d,a))[stat.ST_SIZE] - os.stat(os.path.join(d,b))[stat.ST_SIZE])) \n return list",
"def test_get_resources_ordered(db_session):\n query_params = {\n \"sort\": \"-album_id,title\"\n }\n parser = ModelQueryParamParser(query_params)\n album_resource = AlbumResource(session=db_session)\n result = album_resource.get_collection(\n filters=parser.parse_filters(album_resource.model),\n sorts=parser.parse_sorts()\n )\n assert len(result) == 347\n assert result[0][\"album_id\"] == 347",
"def get_list(self ):\n headers = { 'Authorization' : self.client.authorization_header }\n response = requests.get(\n self.client.url + '/media', \n headers = headers\n )\n\n return json.loads(response.text)",
"def ordered_images(self):\n return self.images.order_by('story_images__id')",
"def listall(self):\n list_query = \"\"\"SELECT * FROM %s\"\"\" % MediaCollection.COLLECTIONS_TABLE\n self.cursor.execute(list_query)\n return [Media.fromtuple(media) for media in self.cursor.fetchall()]",
"def get_guid_objects(queue, media_base, absolute=False):\n index = 1\n guid_map = {}\n object_map = {}\n if absolute:\n image_base = \"{0}\".format(media_base) + \"/media/{0}/{1}\"\n else:\n image_base = \"./media/{0}/{1}\"\n logging.info(\"Collecting GUID object information\")\n for file_object in os.scandir(media_base + \"/metadata/guid\"):\n if not file_object.is_file():\n continue\n with open(file_object.path, \"r\") as toml_file:\n metadata = toml.load(toml_file)\n base_name = os.path.basename(metadata[\"image\"])\n image_name = image_base.format(metadata[\"type\"], base_name)\n image_extension = image_name.split(\".\").pop(-1)\n\n picture_name = metadata[\"title\"]\n if picture_name == \"\":\n picture_name = \"Untitled\"\n\n object_id = \"@M{0}@\".format(index)\n object_entry = [\n \"0 {0} OBJE\".format(object_id),\n \"1 FILE {0}\".format(image_name),\n \"1 FORM {0}\".format(image_extension),\n \"1 TITL {0}\".format(picture_name),\n ]\n\n if metadata[\"type\"] == \"portrait\":\n object_entry.append(\"1 TYPE Photo\")\n else:\n object_entry.append(\"1 TYPE {0}\".format(metadata[\"type\"].title()))\n\n if \"url\" in metadata and metadata[\"url\"] != \"\":\n object_entry.append(\"1 NOTE {0}\".format(metadata[\"url\"]))\n\n if \"facts\" in metadata:\n facts = metadata[\"facts\"]\n for key in [\"description\", \"transcription\"]:\n if key in facts and facts[key] != \"\":\n note = build_note(facts[key], 1)\n if len(note) > 0:\n for item in note:\n object_entry.append(item)\n\n object_map.update({object_id: object_entry})\n guid_map.update({metadata[\"guid\"]: object_id})\n index = index + 1\n if index > 99999:\n logging.error(\"100000 GUID objects not supported, APID range starts there\")\n sys.exit(1)\n logging.info(\"GUID object collection completed\")\n queue.put((guid_map, object_map))",
"def list_media(storage, filter_list):\n results = []\n total = 0\n try:\n for media in storage.listdir('.')[1]:\n if not media.endswith('/') and media != \"\":\n location = storage.url(media).split('?')[0]\n total += 1\n if not filter_list or location in filter_list:\n results += [\n {'location': location,\n 'tags': MediaTag.objects.filter(\n location=location).values_list(\n 'tag', flat=True)\n }]\n except OSError:\n LOGGER.exception(\n \"Unable to list objects in %s.\", storage.__class__.__name__)\n except S3ResponseError:\n LOGGER.exception(\n \"Unable to list objects in %s bucket.\", storage.bucket_name)\n return {'count': total, 'results': results}",
"def get_apid_objects(queue, media_base, args, absolute=False):\n work_lock = Lock()\n work_queue = Queue()\n\n readers = os.cpu_count()\n read_lock = Lock()\n read_queue = Queue()\n read_processes = []\n for number in range(readers):\n read_process = Process(\n target=read_apids, args=(read_queue, read_lock, work_queue, work_lock)\n )\n read_process.start()\n read_processes.append(read_process)\n\n logging.info(\"Collecting APID object information\")\n file_list = []\n file_total = 0\n for file_object in os.scandir(media_base + \"/metadata/apid\"):\n if not file_object.is_file():\n continue\n file_list.append({\"fileName\": file_object.path})\n file_total = file_total + 1\n\n read_lock.acquire()\n for item in file_list:\n read_queue.put(item)\n for item in read_processes:\n read_queue.put({\"exit\": True})\n read_lock.release()\n\n index = 100000\n apid_image_map = {}\n apid_screenshot_map = {}\n apid_full_map = {}\n object_map = {}\n image_cache = {}\n item_count = 0\n if absolute:\n image_base = \"{0}\".format(media_base) + \"/media/{0}\"\n else:\n image_base = \"./media/{0}\"\n while True:\n work_lock.acquire()\n if not work_queue.empty():\n metadata = work_queue.get()\n work_lock.release()\n else:\n work_lock.release()\n time.sleep(0.01)\n continue\n\n item_count = item_count + 1\n apid_full_map.update({metadata[\"apid\"]: metadata})\n if \"image\" in metadata:\n if metadata[\"image\"] not in image_cache:\n base_name = metadata[\"image\"].split(\"/media/\").pop(1)\n image_name = image_base.format(base_name)\n image_extension = image_name.split(\".\").pop(-1)\n\n object_id = \"@M{0}@\".format(index)\n object_entry = [\n \"0 {0} OBJE\".format(object_id),\n \"1 FILE {0}\".format(image_name),\n \"1 FORM {0}\".format(image_extension),\n \"1 TYPE document\",\n ]\n\n object_map.update({object_id: object_entry})\n image_cache.update({metadata[\"image\"]: object_id})\n index = index + 1\n else:\n object_id = image_cache[metadata[\"image\"]]\n apid_image_map.update({metadata[\"apid\"]: object_id})\n if \"screenshot\" in metadata:\n base_name = os.path.basename(metadata[\"screenshot\"])\n image_name = image_base.format(\"apid\") + \"/\" + base_name\n image_extension = image_name.split(\".\").pop(-1)\n\n if \"title\" in metadata and metadata[\"title\"] != \"\":\n title = metadata[\"title\"]\n else:\n title = \"Ancestry.com Source Record, {0}\".format(metadata[\"apid\"])\n\n object_id = \"@M{0}@\".format(index)\n object_entry = [\n \"0 {0} OBJE\".format(object_id),\n \"1 FILE {0}\".format(image_name),\n \"1 FORM {0}\".format(image_extension),\n \"1 TITL {0}\".format(title),\n \"1 REFN {0}\".format(metadata[\"apid\"]),\n ]\n\n if \"url\" in metadata and metadata[\"url\"] != \"\":\n object_entry.append(\"1 NOTE {0}\".format(metadata[\"url\"]))\n\n object_map.update({object_id: object_entry})\n index = index + 1\n apid_screenshot_map.update({metadata[\"apid\"]: object_id})\n\n if item_count == file_total:\n break\n\n for read_process in read_processes:\n read_process.join()\n queue.put((apid_image_map, apid_screenshot_map, apid_full_map, object_map))\n logging.info(\"APID object collection completed\")",
"def recent_media(self):\r\n return media.RecentMedia(self)",
"def recent_media(self):\r\n return media.RecentMedia(self)",
"def recent_media(self):\r\n return media.RecentMedia(self)",
"def recent_media(self):\r\n return media.RecentMedia(self)",
"def location_medias_top_v1(\n self, location_pk: int, amount: int = 21\n ) -> List[Media]:\n return self.location_medias_v1(location_pk, amount, tab_key=\"ranked\")",
"def get_meta_of_files(session=konfuzio_session()) -> List[dict]:\n url = get_documents_meta_url()\n result = []\n\n while True:\n r = retry_get(session, url)\n data = r.json()\n if isinstance(data, dict) and 'results' in data.keys():\n result += data['results']\n if 'next' in data.keys() and data['next']:\n url = data['next']\n else:\n break\n else:\n result = data\n break\n\n sorted_documents = sorted(result, key=itemgetter('id'))\n return sorted_documents",
"def location_medias_recent_v1(\n self, location_pk: int, amount: int = 63\n ) -> List[Media]:\n return self.location_medias_v1(location_pk, amount, tab_key=\"recent\")",
"def recent_media(self):\n return media.RecentMedia(self)",
"def ordered(cls, objs):\n objs = list(objs)\n try:\n objs.sort(key=lambda o: o.latest_message.created, reverse=True)\n except:\n pass\n return objs",
"def __count_media_files(self, CurrentDir):\n self.__counter_lock.acquire()\n self.media_counter = [0, 0]\n self.__counter_lock.release()\n if os.path.exists(CurrentDir):\n for i in os.listdir(CurrentDir):\n MediaType = self.thumb_filter(CurrentDir,i)\n if MediaType == TYPE_PHOTO:\n self.__counter_lock.acquire()\n self.media_counter[0] += 1\n self.__counter_lock.release()\n elif MediaType == TYPE_VIDEO:\n self.__counter_lock.acquire()\n self.media_counter[1] += 1 \n self.__counter_lock.release()",
"def getMediaFiles(path):\n fileList = getMediaFileList(path)\n # dirList = getDirectoryList(path)\n\n # results = map(getMediaFiles, dirList)\n\n # for result in results:\n # fileList = fileList + result\n\n return fileList",
"def get_real_media(self, provider_name):\n return [Media(f, provider_name) for f in self.videos]",
"def test_basic_functionality(self):\n self.assertEqual(mpmodels.MediaItem.objects.count(), 0)\n video = make_video(media_id='1234', title='test title')\n set_resources_and_sync([video])\n self.assertEqual(mpmodels.MediaItem.objects.count(), 1)\n item = mpmodels.MediaItem.objects.get(jwp__key=video.key)\n self.assertEqual(item.title, 'test title')",
"def get_json_media(self, provider_name):\n return [JsonMedia(f, provider_name) for f in self.datafiles[provider_name]]",
"def get_queryset(self):\n samples = AudioSample.objects.distinct()\n if samples:\n return samples.filter(\n pub_date__lte=timezone.now()\n ).order_by('-pub_date')\n else:\n return []",
"def shuffled_thumbnails(self):\n while True:\n video_id = random.choice(self.video_ids)\n metadata = self._id_to_meta[video_id]\n thumbs = [th for th in self.video_thumbnails(video_id)]\n if thumbs:\n yield random.choice(thumbs) + (metadata,)",
"def location_medias_recent_a1(\n self, location_pk: int, amount: int = 24, sleep: float = 0.5\n ) -> List[Media]:\n return self.location_medias_a1(\n location_pk, amount, sleep=sleep, tab_key=\"edge_location_to_media\"\n )",
"def _release_last_resources(self):\n last_resources, self.http_resources = self.http_resources[:], []\n return last_resources",
"def test_get(self):\n fields = {\n 'title': 'Test media title',\n 'description': 'Test media description',\n 'local_media_file': 'test_64K_short.mp3',\n }\n\n media_filename = \"%s/%s\" % (settings.get('base', 'path.local.media'), fields['local_media_file'])\n m = Media.create(\n client=self.client,\n media_filename=media_filename,\n title=fields['title'],\n description=fields['description'],\n )\n\n m = Media.get(client=self.client, uuid=m.uuid)\n assert m.title == fields['title']\n assert m.description == fields['description']",
"def mediaGenerator(request):\n folder = 'content/' + request\n mediaPaths = glob(folder + '/*')\n return random.choice(mediaPaths)"
] | [
"0.61875653",
"0.60358816",
"0.59814835",
"0.5946629",
"0.5863695",
"0.57058465",
"0.5681557",
"0.5633815",
"0.55823237",
"0.5550474",
"0.5542779",
"0.5542779",
"0.5542779",
"0.5542779",
"0.5485966",
"0.54624945",
"0.544271",
"0.5394488",
"0.5365429",
"0.5348334",
"0.5256054",
"0.52526796",
"0.52368546",
"0.5232301",
"0.52296406",
"0.52292305",
"0.5228468",
"0.5222687",
"0.5209426",
"0.51953876"
] | 0.65957546 | 0 |
If a media resource has both an image and a YouTube video ID specified, then cleaning it should throw an error. | def test_clean_both_image_and_youtube_id(image):
resource = models.MediaResource(image=image, youtube_id="dQw4w9WgXcQ")
with pytest.raises(ValidationError):
resource.clean() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_clean_no_image_or_youtube_id():\n resource = models.MediaResource()\n\n with pytest.raises(ValidationError):\n resource.clean()",
"def test_clean_only_youtube_id():\n resource = models.MediaResource(youtube_id=\"dQw4w9WgXcQ\")\n\n resource.clean()",
"def test_clean_only_image(image):\n resource = models.MediaResource(image=image)\n\n resource.clean()",
"def verify_media(self):\n self.check_dataset_duplicate_ids(self.media)",
"def test_video_delete(self):\n v1, v2 = make_video(media_id='1234'), make_video(media_id='2345')\n set_resources_and_sync([v1, v2])\n i1 = mpmodels.MediaItem.objects.get(jwp__key=v1.key)\n i2 = mpmodels.MediaItem.objects.get(jwp__key=v2.key)\n set_resources_and_sync([v1])\n self.assertIsNone(mpmodels.MediaItem.objects.get(id=i1.id).deleted_at)\n self.assertIsNotNone(mpmodels.MediaItem.objects_including_deleted.get(id=i2.id).deleted_at)\n self.assertFalse(mpmodels.MediaItem.objects.filter(id=i2.id).exists())",
"def _handle_removed_media(self):\r\n if self.has_media():\r\n try:\r\n image = str(self.image)\r\n os.remove(image)\r\n except OSError:\r\n raise('Failure trying to remove image from filesystem.')\r\n return True",
"def clean_video_id(self):\n failed = False\n d = self.cleaned_data\n service = d.get('service')\n # Get the video id and clear whitespace on either side.\n video_id = d.get('video_id', '').strip()\n\n # Validate using YouTube's API:\n if service == 'youtube':\n url = ('http://gdata.youtube.com/feeds/api/videos/{}?alt=json'.\n format(video_id))\n data = requests.get(url)\n # Ensure we can parse the JSON data.\n try:\n json = simplejson.loads(data.text)\n # If not, mark this as a failure.\n except ValueError:\n failed = True\n\n # Validate using Vimeo's API:\n elif service == 'vimeo':\n data = requests.get('http://vimeo.com/api/v2/video/{}.json'.\n format(video_id))\n # Ensure we can parse the JSON data.\n try:\n json = simplejson.loads(data.text)\n # If not, mark this as a failure.\n except ValueError:\n failed = True\n\n # Respond based on the outcome.\n if failed:\n message = _(\"Couldn't validate video id using {} API. Please \"\n \"verify it exists and check for \"\n \"typos.\".format(service))\n raise forms.ValidationError(message)\n\n return video_id",
"def delete_video(self, video_ID): # WORKS\n try:\n self.cur.execute(\"DELETE FROM videos WHERE video_ID = \\\"{}\\\"\".format(video_ID))\n self.db.commit()\n os.remove('static/videos/' + str(video_ID) + '.mp4')\n os.remove('static/images/' + str(video_ID) + '.jpg')\n except:\n self.db.rollback()",
"def test_parse_youtube_invalid(self):\r\n\r\n # invalid id\r\n youtube_str = 'thisisaninvalidid'\r\n output = VideoDescriptor._parse_youtube(youtube_str)\r\n self.assertEqual(output, {'0.75': '',\r\n '1.00': '',\r\n '1.25': '',\r\n '1.50': ''})\r\n # another invalid id\r\n youtube_str = ',::,:,,'\r\n output = VideoDescriptor._parse_youtube(youtube_str)\r\n self.assertEqual(output, {'0.75': '',\r\n '1.00': '',\r\n '1.25': '',\r\n '1.50': ''})\r\n\r\n # and another one, partially invalid\r\n youtube_str = '0.75_BAD!!!,1.0:AXdE34_U,1.25:KLHF9K_Y,1.5:VO3SxfeD,'\r\n output = VideoDescriptor._parse_youtube(youtube_str)\r\n self.assertEqual(output, {'0.75': '',\r\n '1.00': 'AXdE34_U',\r\n '1.25': 'KLHF9K_Y',\r\n '1.50': 'VO3SxfeD'})",
"def test_empty_media(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['job']['interface']['inputs']['files'][0]['mediaTypes'] = []\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n del manifest['job']['interface']['inputs']['files'][0]['mediaTypes']\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})",
"def test_no_video_image(self):\n edx_video_id = 'test1'\n get_videos_url = reverse_course_url('videos_handler', self.course.id)\n video_image_upload_url = self.get_url_for_course_key(self.course.id, {'edx_video_id': edx_video_id})\n with make_image_file(\n dimensions=(settings.VIDEO_IMAGE_MIN_WIDTH, settings.VIDEO_IMAGE_MIN_HEIGHT),\n ) as image_file:\n self.client.post(video_image_upload_url, {'file': image_file}, format='multipart')\n\n val_image_url = get_course_video_image_url(course_id=self.course.id, edx_video_id=edx_video_id)\n\n response = self.client.get_json(get_videos_url)\n self.assertEqual(response.status_code, 200)\n response_videos = json.loads(response.content.decode('utf-8'))[\"videos\"]\n for response_video in response_videos:\n if response_video['edx_video_id'] == edx_video_id:\n self.assertEqual(response_video['course_video_image_url'], val_image_url)\n else:\n self.assertEqual(response_video['course_video_image_url'], None)",
"def _sanitize_resources(cls, resources):\n\n try:\n for resource in cls._loop_raw(resources):\n cls._sanitize_resource(resource)\n except (KeyError, TypeError):\n _LOGGER.debug(\"no shade data available\")\n return None",
"def test_type_youtube():\n resource = models.MediaResource(youtube_id=\"dQw4w9WgXcQ\")\n\n assert resource.type == models.MediaResource.TYPE_YOUTUBE",
"def test_recreate_deleted_item(self):\n v1 = make_video(media_id='1234', title='testing')\n set_resources_and_sync([v1])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n self.assertEqual(i1.title, 'testing')\n i1.delete()\n\n set_resources_and_sync([v1])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n self.assertEqual(i1.title, 'testing')",
"def test_video_removal(self):\n edx_video_id = 'test1'\n remove_url = self.get_url_for_course_key(self.course.id, {'edx_video_id': edx_video_id})\n response = self.client.delete(remove_url, HTTP_ACCEPT=\"application/json\")\n self.assertEqual(response.status_code, 204)\n\n self._assert_video_removal(self.url, edx_video_id, 1)",
"def test_only_sms_created(self):\n v1, v2 = make_video(media_id='1234'), make_video()\n set_resources_and_sync([v1, v2])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n i2 = mpmodels.MediaItem.objects.filter(jwp__key=v2.key).first()\n self.assertIsNone(i2)",
"def test_video_image_validation_message(self, image_data, error_message):\n edx_video_id = 'test1'\n video_image_upload_url = self.get_url_for_course_key(self.course.id, {'edx_video_id': edx_video_id})\n with make_image_file(\n dimensions=(\n image_data.get('width', settings.VIDEO_IMAGE_MIN_WIDTH),\n image_data.get('height', settings.VIDEO_IMAGE_MIN_HEIGHT)\n ),\n prefix=image_data.get('prefix', 'videoimage'),\n extension=image_data.get('extension', '.png'),\n force_size=image_data.get('size', settings.VIDEO_IMAGE_SETTINGS['VIDEO_IMAGE_MIN_BYTES'])\n ) as image_file:\n response = self.client.post(video_image_upload_url, {'file': image_file}, format='multipart')\n if error_message:\n self.verify_error_message(response, error_message)\n else:\n self.verify_image_upload_reponse(self.course.id, edx_video_id, response)",
"def test_str_no_title(media_resource_factory):\n resource = media_resource_factory()\n\n assert str(resource) == str(resource.id)",
"def test_upload_image_bad_request(self):\n url = image_upload_url(self.movie.id)\n res = self.client.post(url, {'image': 'notimage'}, format='multipart')\n\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)",
"def clean(self):\n self.clean_rally_conf()\n rally.RallyBase.clean_rally_logs()\n if self.image_alt:\n self.cloud.delete_image(self.image_alt)\n if self.flavor_alt:\n self.orig_cloud.delete_flavor(self.flavor_alt.id)\n super().clean()",
"def prepare_media(self, object):\n if object.media is not None:\n #return object.media.media_file.name\n return '/api/v1/media/{0}/'.format(object.media.id)\n else:\n return ''",
"def clear_images(self):\r\n\r\n with translate_errors():\r\n self.audio.clear_pictures()\r\n self.audio.save()\r\n\r\n super().clear_images()",
"def get_clean_image(image):\n if not image:\n return \"\"\n if \"music@\" in image:\n # fix for embedded images\n thumbcache = xbmc.getCacheThumbName(image).replace(\".tbn\", \".jpg\")\n thumbcache = \"special://thumbnails/%s/%s\" % (thumbcache[0], thumbcache)\n if not xbmcvfs.exists(thumbcache):\n xbmcvfs.copy(image, thumbcache)\n image = thumbcache\n if image and \"image://\" in image:\n image = image.replace(\"image://\", \"\")\n image = urllib.unquote(image.encode(\"utf-8\"))\n if image.endswith(\"/\"):\n image = image[:-1]\n if not isinstance(image, unicode):\n image = image.decode(\"utf8\")\n return image",
"def delete(self, mediaId):\n headers = { 'Authorization' : self.client.authorization_header }\n\n response = requests.delete(\n self.client.url + '/media/' + mediaId,\n headers = headers\n )\n\n #print 'Response: ' + response.text\n return json.loads(response.text)",
"def test_image_no_requiere_del_campos_description(self):\n self.image_obj.description = ''\n\n # Si no hay error, todo OK.\n self.image_obj.save()",
"def test_invalidate_removed_resource(self):\n workflow1 = self.get_workflow(\n \"\"\"file://file2 <- file://file1\n\nfile://file3 <- file://file1\n\"\"\")\n workflow2 = self.get_workflow(\n \"\"\"file://file3 <- file://file1\n\"\"\")\n\n invalid = workflow1.resources_not_created_the_same_way(workflow2)\n assert len(invalid) == 1, [(res.url, reason._reason) for (res, reason,) in invalid]\n (resource, invalidation_reason) = invalid[0]\n assert resource.url == \"file://file2\"\n assert invalidation_reason == NO_LONGER_CREATED, invalidation_reason",
"def extract_media_v1(data):\n user = data[\"user\"]\n location = data.get(\"location\")\n if location:\n location = {\"pk\": int(location.get(\"pk\")), \"name\": location.get(\"name\")}\n video_url = \"\"\n if \"video_versions\" in data:\n # Select Best Quality by Resolutiuon\n video_url = sorted(\n data[\"video_versions\"], key=lambda o: o[\"height\"] * o[\"width\"]\n ).pop()[\"url\"]\n product_type = data.get(\"product_type\", \"\")\n if data[\"media_type\"] == 2 and not product_type:\n product_type = \"feed\"\n thumbnail_url = ''\n if 'image_versions2' in data:\n thumbnail_url = sorted(\n data[\"image_versions2\"][\"candidates\"],\n key=lambda o: o[\"height\"] * o[\"width\"],\n ).pop()[\"url\"]\n return {\n \"pk\": int(data[\"pk\"]),\n \"taken_at\": int(data[\"taken_at\"]),\n \"id\": data[\"id\"],\n \"media_type\": data[\"media_type\"],\n \"product_type\": product_type,\n \"code\": data[\"code\"],\n \"thumbnail_url\": thumbnail_url,\n \"location\": location,\n \"user\": extract_user_short(user),\n \"comment_count\": int(data.get(\"comment_count\") or 0),\n \"like_count\": int(data.get(\"like_count\") or 0), # the media just published has no like_count\n \"caption_text\": json_value(data, \"caption\", \"text\", default=\"\"),\n \"usertags\": [\n extract_usertag(usertag)\n for usertag in data.get(\"usertags\", {}).get(\"in\", [])\n ],\n \"video_url\": video_url,\n \"view_count\": int(data.get('view_count') or 0),\n \"video_duration\": data.get('video_duration'),\n \"title\": data.get(\"title\") or None,\n \"resources\": [\n extract_resource_v1(edge)\n for edge in data.get('carousel_media', [])\n ]\n }",
"async def _apply_media_retention_rules(self) -> None:\n # Purge remote media\n if self._media_retention_remote_media_lifetime_ms is not None:\n # Calculate a threshold timestamp derived from the configured lifetime. Any\n # media that has not been accessed since this timestamp will be removed.\n remote_media_threshold_timestamp_ms = (\n self.clock.time_msec() - self._media_retention_remote_media_lifetime_ms\n )\n\n logger.info(\n \"Purging remote media last accessed before\"\n f\" {remote_media_threshold_timestamp_ms}\"\n )\n\n await self.delete_old_remote_media(\n before_ts=remote_media_threshold_timestamp_ms\n )\n\n # And now do the same for local media\n if self._media_retention_local_media_lifetime_ms is not None:\n # This works the same as the remote media threshold\n local_media_threshold_timestamp_ms = (\n self.clock.time_msec() - self._media_retention_local_media_lifetime_ms\n )\n\n logger.info(\n \"Purging local media last accessed before\"\n f\" {local_media_threshold_timestamp_ms}\"\n )\n\n await self.delete_old_local_media(\n before_ts=local_media_threshold_timestamp_ms,\n keep_profiles=True,\n delete_quarantined_media=False,\n delete_protected_media=False,\n )",
"def delete_thumbnail(self, thumbnail_name):",
"def __sanitize_input(self):\n self.__check_for_video_file()\n self.__manage_output_folder()"
] | [
"0.7774484",
"0.7352817",
"0.6742315",
"0.6415747",
"0.59018755",
"0.5872863",
"0.58247",
"0.57007104",
"0.55338883",
"0.5439113",
"0.54177034",
"0.53636616",
"0.5305156",
"0.53002834",
"0.52959025",
"0.51120865",
"0.51028275",
"0.50996584",
"0.5081952",
"0.5054293",
"0.5038761",
"0.50347155",
"0.50297874",
"0.50283825",
"0.5011697",
"0.4982682",
"0.49596292",
"0.49486077",
"0.4939692",
"0.49360138"
] | 0.78584284 | 0 |
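The record above pins down one side of the validation contract: `clean()` must reject a resource that carries both media kinds. A minimal sketch of a model consistent with this test and the ones that follow — the field options, error messages, and `max_length` are assumptions, not taken from the original project:

```python
from django.core.exceptions import ValidationError
from django.db import models


class MediaResource(models.Model):
    """Sketch: a resource wrapping either an uploaded image or a YouTube video."""

    # Both fields are optional at the field level; clean() enforces exactly one.
    image = models.ImageField(blank=True)
    youtube_id = models.CharField(blank=True, max_length=32)

    def clean(self):
        """Raise unless exactly one of ``image`` / ``youtube_id`` is set."""
        has_image = bool(self.image)
        has_youtube = bool(self.youtube_id)
        if has_image and has_youtube:
            raise ValidationError(
                "A media resource may not have both an image and a YouTube ID."
            )
        if not (has_image or has_youtube):
            raise ValidationError(
                "A media resource must have either an image or a YouTube ID."
            )
```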
If a media resource does not encapsulate any media, cleaning it should throw an error. | def test_clean_no_image_or_youtube_id():
resource = models.MediaResource()
with pytest.raises(ValidationError):
resource.clean() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _handle_removed_media(self):\r\n if self.has_media():\r\n try:\r\n image = str(self.image)\r\n os.remove(image)\r\n except OSError:\r\n raise('Failure trying to remove image from filesystem.')\r\n return True",
"def test_clean_only_image(image):\n resource = models.MediaResource(image=image)\n\n resource.clean()",
"def test_clean_both_image_and_youtube_id(image):\n resource = models.MediaResource(image=image, youtube_id=\"dQw4w9WgXcQ\")\n\n with pytest.raises(ValidationError):\n resource.clean()",
"def _finalize(self):\n if self.url and self.url.startswith('file://'):\n self.parse_external_files(self.url[7:])\n Media._finalize(self)",
"def test_clean_only_youtube_id():\n resource = models.MediaResource(youtube_id=\"dQw4w9WgXcQ\")\n\n resource.clean()",
"def verify_media(self):\n self.check_dataset_duplicate_ids(self.media)",
"def test_empty_media(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['job']['interface']['inputs']['files'][0]['mediaTypes'] = []\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n del manifest['job']['interface']['inputs']['files'][0]['mediaTypes']\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})",
"def delete_media():\n return Response(\"{}\", status=200, mimetype='application/json')\n message_status = request.form[\"MessageStatus\"]\n\n if (message_status == 'delivered'):\n MessageSid = request.form[\"MessageSid\"]\n MediaSid = request.form[\"MediaUrl0\"].split('/')[-1]\n delete_media_file(MessageSid, MediaSid)",
"def test_media_attribute_blows_up():\n with pytest.raises(AssertionError):\n MediaBag().media",
"async def _apply_media_retention_rules(self) -> None:\n # Purge remote media\n if self._media_retention_remote_media_lifetime_ms is not None:\n # Calculate a threshold timestamp derived from the configured lifetime. Any\n # media that has not been accessed since this timestamp will be removed.\n remote_media_threshold_timestamp_ms = (\n self.clock.time_msec() - self._media_retention_remote_media_lifetime_ms\n )\n\n logger.info(\n \"Purging remote media last accessed before\"\n f\" {remote_media_threshold_timestamp_ms}\"\n )\n\n await self.delete_old_remote_media(\n before_ts=remote_media_threshold_timestamp_ms\n )\n\n # And now do the same for local media\n if self._media_retention_local_media_lifetime_ms is not None:\n # This works the same as the remote media threshold\n local_media_threshold_timestamp_ms = (\n self.clock.time_msec() - self._media_retention_local_media_lifetime_ms\n )\n\n logger.info(\n \"Purging local media last accessed before\"\n f\" {local_media_threshold_timestamp_ms}\"\n )\n\n await self.delete_old_local_media(\n before_ts=local_media_threshold_timestamp_ms,\n keep_profiles=True,\n delete_quarantined_media=False,\n delete_protected_media=False,\n )",
"def clear_images(self):\r\n\r\n with translate_errors():\r\n self.audio.clear_pictures()\r\n self.audio.save()\r\n\r\n super().clear_images()",
"def clean_old_data():\n logger.info('Cleaning standalone files on disk...')\n for absolute_path in glob.glob(MEDIA_URL + '*'):\n file_name = os.path.basename(absolute_path)\n try:\n relative_path = os.path.join(AUDIOS_URL, file_name)\n audio = Audio.objects.get(filename=relative_path)\n if audio.get_type() == 'episode':\n try:\n # If there are inactive audios on its being\n for e in audio.podcast.episode_set.exclude(pk=audio.podcast.active_episode.pk):\n if not e.is_active():\n logger.info('Inactive audio found in podcast set. Erasing files.')\n e.delete_files()\n except Exception, e:\n logger.exception(e.message)\n except ObjectDoesNotExist, e:\n logger.info('A file with no audio registered in database')\n if os.path.isfile(relative_path):\n logger.info('Erasing: %s' % relative_path)\n os.remove(relative_path)\n logger.info('... Done.')",
"def delete(self, mediaId):\n headers = { 'Authorization' : self.client.authorization_header }\n\n response = requests.delete(\n self.client.url + '/media/' + mediaId,\n headers = headers\n )\n\n #print 'Response: ' + response.text\n return json.loads(response.text)",
"def test_media_attribute_is_fine_after_being_set():\n b = MediaBag()\n b.media = None\n assert b.media is None",
"def delete_media_file(MessageSid, MediaSid):\n delete_content = client.messages(MessageSid).media(MediaSid).delete()\n if (delete_content == True):\n print(\"Content deleted\", MessageSid, MediaSid)\n else:\n print(\"Content NOT deleted\")",
"def cleanup_old_backups(self):\n print(\"Cleaning Old Backups for media files\")\n\n file_list = utils.get_backup_file_list(\n self.get_databasename(),\n self.get_servername(),\n 'media.tar.gz',\n self.storage\n )\n\n for backup_date, filename in file_list[0:-dbbackup_settings.CLEANUP_KEEP_MEDIA]:\n if int(backup_date.strftime(\"%d\")) != 1:\n print(\" Deleting: %s\" % filename)\n self.storage.delete_file(filename)",
"def clean_error(self):\r\n return self._arm.clean_error()",
"def remove_media(media, window=None, gui_instance=None):\r\n\r\n cursor = connection.cursor()\r\n\r\n if media.isnumeric(): # CLI-only: The user has attempted to delete the media file based on its ID in the database\r\n cursor.execute(\"SELECT full_path FROM media WHERE id = \" + media)\r\n\r\n full_path = cursor.fetchone()\r\n\r\n if full_path is None: # The system couldn't find the specified ID\r\n print(\"Error: The specified ID does not exist in the database.\")\r\n return\r\n\r\n # Attempting to remove the media file record from the database\r\n try:\r\n cursor.execute(\"DELETE FROM media WHERE id = \" + media) # Deleting the record from the database\r\n\r\n connection.commit() # Writing the changes to the database\r\n\r\n except Error: # Database is locked\r\n print(\"\\nError when trying to commit changes to database. Make sure another application is not using the \"\r\n \"database.\")\r\n\r\n return False\r\n\r\n cursor.close()\r\n\r\n # Attempting to re-order the keys after the deleted one\r\n if not resort_keys(media): # Fatal error: database is locked\r\n print(\"\\nERROR: DATABASE COULD NOT BE UPDATED. APPLICATION CANNOT WORK AS INTENDED. \"\r\n \"PLEASE MANUALLY REMOVE ALL MEDIA FILES FROM THE MEDIA FOLDER AND TRY ADDING THEM BACK.\")\r\n sys.exit() # Quitting; the application will malfunction until the user manually resets the media folder\r\n\r\n try:\r\n os.remove(full_path[0].replace(\"\\\\\", \"/\")) # Removes the media file from the media folder\r\n\r\n except FileNotFoundError:\r\n print(\"\\nError: Could not remove the file from the media folder: The file does not exist.\")\r\n return False\r\n\r\n except PermissionError:\r\n print(\"\\nError: Unable to remove file from the media folder. Make sure you haven't selected a \"\r\n \"write-protected folder. If the issue persists, try changing the media folder and manually removing\"\r\n \" the media file from the current media folder.\")\r\n return False\r\n\r\n print(\"\\nThe media file has been removed.\")\r\n\r\n else: # The user is either using the GUI or has provided the filename as parameter\r\n # Getting the full path of the file (using an app-level convention for slashes)\r\n full_path = os.path.join(media_folder, os.path.basename(media)).replace(\"\\\\\", \"/\")\r\n\r\n if path.exists(full_path): # (CLI-only) Checking if the provided filename exists\r\n\r\n # Getting the id of the media which will be removed in order to re-order the IDs of the database\r\n cursor.execute(\"SELECT id FROM media WHERE full_path = \" + \"\\\"\" + full_path + \"\\\"\")\r\n id_value = cursor.fetchone()\r\n\r\n # Attempting to remove the media file record from the database\r\n try:\r\n cursor.execute(\"DELETE FROM media WHERE full_path = \" + \"\\\"\" + full_path + \"\\\"\")\r\n\r\n connection.commit() # Writing the changes to the database\r\n\r\n except Error: # Database is locked\r\n # Application is running in GUI-mode\r\n if gui_instance is not None:\r\n messagebox.showerror(\"Database is locked\", \"Error when trying to commit changes to database. Make \"\r\n \"sure another application is not using the database.\")\r\n\r\n # Application is running in CLI or debugging mode\r\n if config_var['RUN-MODE']['run_mode'] == \"1\" or config_var['RUN-MODE']['run_mode'] == \"2\":\r\n print(\"\\nError when trying to commit changes to database. 
Make sure another application is not \"\r\n \"using the database.\")\r\n\r\n return False\r\n\r\n cursor.close()\r\n\r\n # Attempting to re-order the keys after the deleted one\r\n if not resort_keys(id_value[0]): # Fatal error: database is locked\r\n # Application is running in GUI-mode\r\n if gui_instance is not None:\r\n messagebox.showerror(\"Database error\", \"DATABASE COULD NOT BE UPDATED. APPLICATION CANNOT WORK AS \"\r\n \"INTENDED. PLEASE MANUALLY REMOVE ALL MEDIA FILES FROM THE MEDIA FOLDER AND \"\r\n \"TRY ADDING THEM BACK.\")\r\n # Quitting; the application will malfunction until the user manually resets the media folder\r\n sys.exit()\r\n\r\n # Application is running in CLI or debugging mode\r\n if config_var['RUN-MODE']['run_mode'] == \"1\" or config_var['RUN-MODE']['run_mode'] == \"2\":\r\n print(\"\\nERROR: DATABASE COULD NOT BE UPDATED. APPLICATION CANNOT WORK AS INTENDED. \"\r\n \"PLEASE MANUALLY REMOVE ALL MEDIA FILES FROM THE MEDIA FOLDER AND TRY ADDING THEM BACK.\")\r\n # Quitting; the application will malfunction until the user manually resets the media folder\r\n sys.exit()\r\n\r\n try:\r\n os.remove(full_path) # Removes the media file from the media folder\r\n\r\n except FileNotFoundError:\r\n # Application is running in GUI-mode\r\n if gui_instance is not None:\r\n messagebox.showerror(\"File not found\", \"The file does not exist.\")\r\n\r\n # Application is running in CLI or debugging mode\r\n if config_var['RUN-MODE']['run_mode'] == \"1\" or config_var['RUN-MODE']['run_mode'] == \"2\":\r\n print(\"\\nError: Could not remove the file from the media folder: The file does not exist.\")\r\n\r\n return False\r\n\r\n except PermissionError:\r\n # Application is running in GUI-mode\r\n if gui_instance is not None:\r\n messagebox.showerror(\"Unable to remove file\", \"Unable to remove file from the media folder. Make \"\r\n \"sure you haven't selected a write-protected folder. If the issue persists, \"\r\n \"try changing the media folder and manually removing the media file from the \"\r\n \"current media folder.\")\r\n\r\n # Application is running in CLI or debugging mode\r\n if config_var['RUN-MODE']['run_mode'] == \"1\" or config_var['RUN-MODE']['run_mode'] == \"2\":\r\n print(\"\\nError: Unable to remove file from the media folder. Make sure you haven't selected a \"\r\n \"write-protected folder. If the issue persists, try changing the media folder and manually \"\r\n \"removing the media file from the current media folder.\")\r\n\r\n return False\r\n\r\n if gui_instance is not None: # The method has been fired by a GUI widget\r\n window.destroy() # Closes the removal window\r\n\r\n # Reloading the media list of the root window\r\n gui_instance.library_items = []\r\n gui_instance.path_frame_parent.destroy()\r\n gui_instance.display_media()\r\n\r\n else: # The method has been fired by using CLI\r\n print(\"\\nThe media file has been removed.\")\r\n\r\n else: # (CLI-only) The user has provided an invalid filename\r\n print(\"\\nError: The specified media file does not exist.\")\r\n return False\r\n\r\n return True",
"def photo_file_cleanup(sender, **kwargs):\n instance = kwargs.get('instance')\n filename = instance.path.url[1:]\n if os.path.exists(filename):\n os.remove(filename)",
"def cleanupResources():\n None",
"def _sanitize_resources(cls, resources):\n\n try:\n for resource in cls._loop_raw(resources):\n cls._sanitize_resource(resource)\n except (KeyError, TypeError):\n _LOGGER.debug(\"no shade data available\")\n return None",
"def test_delete_collection_namespaced_image_stream(self):\n pass",
"def remove_stuff_post_error(self):\n os.system('rm %s' % self.destination)",
"def test_delete_namespaced_image_stream(self):\n pass",
"def test_delete_res_file_deletes_mp_file_object(composite_resource_with_mp_aggregation, mock_irods):\n\n res, user = composite_resource_with_mp_aggregation\n mp_aggr = next(res.logical_files)\n assert isinstance(mp_aggr, ModelProgramLogicalFile)\n res_file = res.files.first()\n assert ModelProgramResourceFileType.objects.count() == 0\n mp_aggregation = ModelProgramLogicalFile.objects.first()\n # set the res_file as software for this aggregation\n ModelProgramResourceFileType.create(file_type='software', res_file=res_file,\n mp_metadata=mp_aggregation.metadata)\n assert ModelProgramResourceFileType.objects.count() == 1\n # delete res_file\n delete_resource_file(pk=res.short_id, filename_or_id=res_file.id, user=user)\n # mp program file type got deleted\n assert ModelProgramResourceFileType.objects.count() == 0\n assert ModelProgramLogicalFile.objects.count() == 0\n assert not res.dangling_aggregations_exist()",
"def clean_resource() -> list:\n helpers.starting_clean_print(RESOURCE_NAME)\n resource_client = boto3.client(BOTO3_NAME)\n resources = get_resources(resource_client)\n terminated_items = delete_resources(resource_client, resources)\n helpers.finished_clean_print(RESOURCE_NAME, terminated_items)\n return terminated_items",
"def prepare_media(self, object):\n if object.media is not None:\n #return object.media.media_file.name\n return '/api/v1/media/{0}/'.format(object.media.id)\n else:\n return ''",
"def clean(self):\n if self.image:\n self.glance.images.delete(self.image['id'])\n\n if self.image_file:\n shutil.rmtree(self.download_path)",
"def tearDown(self):\n if os.path.exists(settings.MEDIA_ROOT):\n shutil.rmtree(settings.MEDIA_ROOT)",
"def tearDown(self):\n if os.path.exists(settings.MEDIA_ROOT):\n shutil.rmtree(settings.MEDIA_ROOT)"
] | [
"0.69884086",
"0.6832411",
"0.6076411",
"0.5952054",
"0.5877735",
"0.586007",
"0.574167",
"0.5715479",
"0.56592447",
"0.56336623",
"0.5573302",
"0.55011815",
"0.5439052",
"0.54167026",
"0.5395553",
"0.5383151",
"0.53797346",
"0.532659",
"0.52952904",
"0.5293007",
"0.5271096",
"0.5266999",
"0.5254611",
"0.52485305",
"0.52427626",
"0.5218256",
"0.5203573",
"0.5191458",
"0.5164114",
"0.5164114"
] | 0.70109206 | 0 |
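One detail the empty-resource test above relies on implicitly: Django does not invoke `clean()` automatically when a model is saved; it runs as part of `full_clean()` (alongside `clean_fields()` and `validate_unique()`), which is why these tests call the hook directly. A short usage sketch, assuming the `MediaResource` sketched after the previous record:

```python
from django.core.exceptions import ValidationError

resource = MediaResource()  # neither image nor youtube_id set
try:
    # full_clean() runs field validation, then clean(), then uniqueness checks.
    resource.full_clean()
except ValidationError as exc:
    print(exc.messages)
```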
Cleaning a media resource that only has an image should do nothing. | def test_clean_only_image(image):
resource = models.MediaResource(image=image)
resource.clean() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _handle_removed_media(self):\r\n if self.has_media():\r\n try:\r\n image = str(self.image)\r\n os.remove(image)\r\n except OSError:\r\n raise('Failure trying to remove image from filesystem.')\r\n return True",
"def clean_before_save(self, image_only=False):\n self.image = None\n self.animated = False\n if hasattr(self, \"dict_image\"):\n # self.dict_image = None\n delattr(self, \"dict_image\")\n if hasattr(self, \"list_image\"):\n self.list_image = None\n delattr(self, \"list_image\")",
"def test_clean_no_image_or_youtube_id():\n resource = models.MediaResource()\n\n with pytest.raises(ValidationError):\n resource.clean()",
"def remove_image_file(sender, instance, **kwargs):\n # Pass false so ImageField doesn't save the model.\n instance.image.delete(False)",
"def clean(self):\n if self.image:\n self.glance.images.delete(self.image['id'])\n\n if self.image_file:\n shutil.rmtree(self.download_path)",
"def clear_images(self):\r\n\r\n with translate_errors():\r\n self.audio.clear_pictures()\r\n self.audio.save()\r\n\r\n super().clear_images()",
"def clear_images(self):\r\n\r\n # audio = self.MutagenType(self['filename'])\r\n self.audio.pop(\"metadata_block_picture\", None)\r\n self.audio.pop(\"coverart\", None)\r\n self.audio.pop(\"coverartmime\", None)\r\n self.audio.save()",
"def photo_file_cleanup(sender, **kwargs):\n instance = kwargs.get('instance')\n filename = instance.path.url[1:]\n if os.path.exists(filename):\n os.remove(filename)",
"def applyMorphologicalCleaning(self, image):",
"def clear_renders(self, media_id, owner_username, node):\n\t\ttry:\n\t\t\tmedia_id = validation.media_id(media_id)\n\t\t\towner_username = validation.username(owner_username)\n\t\t\tvalidation.required(node, 'node')\n\t\texcept errors.ValidationError, ex:\n\t\t\treturn utils.return_deferred_error(ex.value)\n\n\t\tself.log.debug(\"Clearing renders for image [%s] in user [%s]'s account\" % (media_id, owner_username))\n\n\t\t@stack\n\t\tdef do_clear(result):\n\t\t\tif result[0] != 0:\n\t\t\t\traise errors.APIError(result[1])\n\n\t\t\tpaths = result[1]\n\t\t\tdl = []\n\t\t\tfor path in paths:\n\t\t\t\tself.log.debug(\"running delete on [%s.jpg]\" % path)\n\t\t\t\tdl.append(self._delete_binary(\"%s.jpg\" % path))\n\t\t\tdList = DeferredList(dl)\n\t\t\tdList.addCallback(lambda _: \"success\")\n\t\t\treturn dList\n\n\n\t\td = self._generate_render_paths(media_id, node, owner_username)\n\t\td.addCallback(do_clear)\n\t\td.addCallback(lambda _: (0, _))\n\t\td.addErrback(lambda _: (-1, _.getErrorMessage))\n\t\treturn d",
"def unpropagateImage(self, dryrun):\n pass",
"def clearImage(self):\n if self.hasImage():\n self.scene.removeItem(self._image)\n self._image = None",
"def delete_test_image(image_field):\n warnings.warn(DeprecationWarning(\n \"delete_test_image() is deprecated in favour of the \"\n \"get_sample_image() context manager.\"), stacklevel=2)\n # ensure all thumbs are deleted\n for filename in glob.glob(\n os.path.join(\n settings.MEDIA_ROOT, 'thumbs', image_field.name.split('/')[-1]\n ) + '*'\n ):\n os.unlink(filename)\n # delete the saved file\n image_field.delete()",
"def test_imagefield_annotate_with_bitmap_image_after_clean(self):\n from PIL.BmpImagePlugin import BmpImageFile\n try:\n Image.register_mime(BmpImageFile.format, None)\n f = ImageField()\n img_path = get_img_path('filepath_test_files/1x1.bmp')\n with open(img_path, 'rb') as img_file:\n img_data = img_file.read()\n\n img_file = SimpleUploadedFile('1x1.bmp', img_data)\n img_file.content_type = 'text/plain'\n\n uploaded_file = f.clean(img_file)\n\n self.assertEqual('BMP', uploaded_file.image.format)\n self.assertIsNone(uploaded_file.content_type)\n finally:\n Image.register_mime(BmpImageFile.format, 'image/bmp')",
"def get_clean_image(image):\n if not image:\n return \"\"\n if \"music@\" in image:\n # fix for embedded images\n thumbcache = xbmc.getCacheThumbName(image).replace(\".tbn\", \".jpg\")\n thumbcache = \"special://thumbnails/%s/%s\" % (thumbcache[0], thumbcache)\n if not xbmcvfs.exists(thumbcache):\n xbmcvfs.copy(image, thumbcache)\n image = thumbcache\n if image and \"image://\" in image:\n image = image.replace(\"image://\", \"\")\n image = urllib.unquote(image.encode(\"utf-8\"))\n if image.endswith(\"/\"):\n image = image[:-1]\n if not isinstance(image, unicode):\n image = image.decode(\"utf8\")\n return image",
"def except_image_only(resource):\n if resource.image is None:\n raise FeatureExtractionError(resource, 400, 'Image resource is required')\n if resource.mask:\n raise FeatureExtractionError(resource, 400, 'Mask resource is not accepted')\n if resource.gobject:\n raise FeatureExtractionError(resource, 400, 'Gobject resource is not accepted')",
"def photo_edit_file_cleanup(sender, **kwargs):\n instance = kwargs.get('instance')\n filename = instance.upload.url[1:]\n if os.path.exists(filename):\n os.remove(filename)",
"def __check_delete_images(self):\n for i, (ctr, usingproperty) in enumerate(zip(self.__using_image_counter, self.__accessed_image)):\n if not ctr and not usingproperty:\n self.__images_cache[i] = None",
"def test_clean_both_image_and_youtube_id(image):\n resource = models.MediaResource(image=image, youtube_id=\"dQw4w9WgXcQ\")\n\n with pytest.raises(ValidationError):\n resource.clean()",
"def strip(self):\n result = library.MagickStripImage(self.wand)\n if not result:\n self.raise_exception()",
"def clearImage(self):\n if self.hasImage():\n self.scene.removeItem(self._pixmapHandle)\n self._pixmapHandle = None\n self.zoom=-1\n self.scene.clear()",
"def clean(context):\n print(f\"Attempting to forcefully remove image {IMAGE_NAME}:{IMAGE_VER}\")\n context.run(f\"docker rmi {IMAGE_NAME}:{IMAGE_VER} --force\")\n print(f\"Successfully removed image {IMAGE_NAME}:{IMAGE_VER}\")",
"def _ensure_empty_image_ok(self):\n if self.ignore_empty:\n return\n\n if len(self) > 1:\n raise RuntimeError(\n \"Cannot write None image at extension %d\" % len(self))\n if 'ndims' in self[0]._info:\n raise RuntimeError(\"Can only write None images to extension zero, \"\n \"which already exists\")",
"def clear_thumbnails(self):",
"def test_cambia_imagen_elimina_la_antigua(self):\n self.image_path = os.path.join(os.path.dirname(__file__), 'image_for_model2.jpg')\n image_path = self.image_obj.image.path\n self.image_obj.image = simple_uploaded_file(self.image_path)\n self.image_obj.save()\n\n self.assertNotEqual(image_path, self.image_obj.image.path)\n self.assertFalse(os.path.exists(image_path))",
"def test_delete_namespaced_image_stream(self):\n pass",
"def check_files(self):\n for f in self.filenames:\n img = cv2.imread(f, int(self.color))\n if img is None:\n os.remove(f)",
"def clean(self):\n self.clean_rally_conf()\n rally.RallyBase.clean_rally_logs()\n if self.image_alt:\n self.cloud.delete_image(self.image_alt)\n if self.flavor_alt:\n self.orig_cloud.delete_flavor(self.flavor_alt.id)\n super().clean()",
"def test_delete_collection_namespaced_image_stream(self):\n pass",
"def clean(self):\n tags = self.get_tags()\n for tag in tags:\n image_name = self.build_image_name(tag)\n try:\n self.client.images.remove(image_name, force=True)\n except Exception as ex:\n print('Cannot remove {}: {}'.format(tag, str(ex)))"
] | [
"0.698834",
"0.67045337",
"0.6504589",
"0.6436783",
"0.6413871",
"0.637336",
"0.6318583",
"0.61001015",
"0.60031515",
"0.59312874",
"0.59246147",
"0.5909386",
"0.58954066",
"0.5854419",
"0.58376265",
"0.58252335",
"0.5816791",
"0.5799392",
"0.5791953",
"0.5740084",
"0.57199484",
"0.57000625",
"0.568925",
"0.566456",
"0.56227756",
"0.5613354",
"0.56076276",
"0.5597479",
"0.55968785",
"0.55814916"
] | 0.7978231 | 0 |
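The image-only test above, like the earlier ones taking an `image` argument, implies a pytest fixture that yields something assignable to an `ImageField`. The fixture below is a hypothetical reconstruction using Pillow and Django's `SimpleUploadedFile`; the original suite's fixture may differ:

```python
import io

import pytest
from django.core.files.uploadedfile import SimpleUploadedFile
from PIL import Image


@pytest.fixture
def image():
    """Yield a tiny in-memory PNG wrapped as an uploaded file."""
    buffer = io.BytesIO()
    Image.new("RGB", (10, 10), color="red").save(buffer, format="PNG")
    return SimpleUploadedFile(
        name="test.png", content=buffer.getvalue(), content_type="image/png"
    )
```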
Cleaning a media resource that only has a YouTube video ID should do nothing. | def test_clean_only_youtube_id():
resource = models.MediaResource(youtube_id="dQw4w9WgXcQ")
resource.clean() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_clean_no_image_or_youtube_id():\n resource = models.MediaResource()\n\n with pytest.raises(ValidationError):\n resource.clean()",
"def test_clean_both_image_and_youtube_id(image):\n resource = models.MediaResource(image=image, youtube_id=\"dQw4w9WgXcQ\")\n\n with pytest.raises(ValidationError):\n resource.clean()",
"def clean_video_id(self):\n failed = False\n d = self.cleaned_data\n service = d.get('service')\n # Get the video id and clear whitespace on either side.\n video_id = d.get('video_id', '').strip()\n\n # Validate using YouTube's API:\n if service == 'youtube':\n url = ('http://gdata.youtube.com/feeds/api/videos/{}?alt=json'.\n format(video_id))\n data = requests.get(url)\n # Ensure we can parse the JSON data.\n try:\n json = simplejson.loads(data.text)\n # If not, mark this as a failure.\n except ValueError:\n failed = True\n\n # Validate using Vimeo's API:\n elif service == 'vimeo':\n data = requests.get('http://vimeo.com/api/v2/video/{}.json'.\n format(video_id))\n # Ensure we can parse the JSON data.\n try:\n json = simplejson.loads(data.text)\n # If not, mark this as a failure.\n except ValueError:\n failed = True\n\n # Respond based on the outcome.\n if failed:\n message = _(\"Couldn't validate video id using {} API. Please \"\n \"verify it exists and check for \"\n \"typos.\".format(service))\n raise forms.ValidationError(message)\n\n return video_id",
"def test_video_delete(self):\n v1, v2 = make_video(media_id='1234'), make_video(media_id='2345')\n set_resources_and_sync([v1, v2])\n i1 = mpmodels.MediaItem.objects.get(jwp__key=v1.key)\n i2 = mpmodels.MediaItem.objects.get(jwp__key=v2.key)\n set_resources_and_sync([v1])\n self.assertIsNone(mpmodels.MediaItem.objects.get(id=i1.id).deleted_at)\n self.assertIsNotNone(mpmodels.MediaItem.objects_including_deleted.get(id=i2.id).deleted_at)\n self.assertFalse(mpmodels.MediaItem.objects.filter(id=i2.id).exists())",
"def test_type_youtube():\n resource = models.MediaResource(youtube_id=\"dQw4w9WgXcQ\")\n\n assert resource.type == models.MediaResource.TYPE_YOUTUBE",
"def test_video_removal(self):\n edx_video_id = 'test1'\n remove_url = self.get_url_for_course_key(self.course.id, {'edx_video_id': edx_video_id})\n response = self.client.delete(remove_url, HTTP_ACCEPT=\"application/json\")\n self.assertEqual(response.status_code, 204)\n\n self._assert_video_removal(self.url, edx_video_id, 1)",
"def play_youtube(self, media_id):\n pass",
"def __trim_youtube_link(link: str):\n if \"t=\" in link:\n # chop off \"&t=\", \"?t=\" or \"#t=\"\n link = link.split('t=')[0][:-1]\n return link",
"def verify_media(self):\n self.check_dataset_duplicate_ids(self.media)",
"def test_clean_only_image(image):\n resource = models.MediaResource(image=image)\n\n resource.clean()",
"def clear_unique_video(self):\n self.top_unique_video_entry.delete(0, END)\n self.top_unique_video_box.delete(0, END)\n self.unique_video_found = False\n self.missing_files_label.grid_remove()",
"def play_youtube(self, media_id):\n raise NotImplementedError()",
"def test_parse_youtube_empty(self):\r\n self.assertEqual(VideoDescriptor._parse_youtube(''),\r\n {'0.75': '',\r\n '1.00': '',\r\n '1.25': '',\r\n '1.50': ''})",
"def delete_video(self, video_ID): # WORKS\n try:\n self.cur.execute(\"DELETE FROM videos WHERE video_ID = \\\"{}\\\"\".format(video_ID))\n self.db.commit()\n os.remove('static/videos/' + str(video_ID) + '.mp4')\n os.remove('static/images/' + str(video_ID) + '.jpg')\n except:\n self.db.rollback()",
"def test_parse_youtube_empty(self):\r\n self.assertEqual(\r\n VideoDescriptor._parse_youtube(''),\r\n {'0.75': '',\r\n '1.00': '',\r\n '1.25': '',\r\n '1.50': ''}\r\n )",
"def test_parse_youtube_invalid(self):\r\n\r\n # invalid id\r\n youtube_str = 'thisisaninvalidid'\r\n output = VideoDescriptor._parse_youtube(youtube_str)\r\n self.assertEqual(output, {'0.75': '',\r\n '1.00': '',\r\n '1.25': '',\r\n '1.50': ''})\r\n # another invalid id\r\n youtube_str = ',::,:,,'\r\n output = VideoDescriptor._parse_youtube(youtube_str)\r\n self.assertEqual(output, {'0.75': '',\r\n '1.00': '',\r\n '1.25': '',\r\n '1.50': ''})\r\n\r\n # and another one, partially invalid\r\n youtube_str = '0.75_BAD!!!,1.0:AXdE34_U,1.25:KLHF9K_Y,1.5:VO3SxfeD,'\r\n output = VideoDescriptor._parse_youtube(youtube_str)\r\n self.assertEqual(output, {'0.75': '',\r\n '1.00': 'AXdE34_U',\r\n '1.25': 'KLHF9K_Y',\r\n '1.50': 'VO3SxfeD'})",
"def validate_youtube(fragment):\n request=urllib.urlopen('https://www.youtube.com/watch?v=' + fragment)\n return request.getcode() == 200",
"def validate_song(song):\n attrs = [\"default_arrangement\", \"composer\", \"copyright\", \"youtube\", \"ccli\"]\n for a in attrs:\n if getattr(song, a) in [None, \"None\"]:\n setattr(song, a, \"\")\n return song",
"def allow_video(self, video_id):\n video = self._video_library.get_video(video_id)\n if not self._video_library.get_video(video_id):\n print(\"Cannot remove flag from video: Video does not exist\")\n return\n if not video.flag:\n print(\"Cannot remove flag from video: Video is not flagged\")\n return\n print(f\"Successfully removed flag from video: {video.title}\")\n video.set_flag(None)",
"def test_only_sms_created(self):\n v1, v2 = make_video(media_id='1234'), make_video()\n set_resources_and_sync([v1, v2])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n i2 = mpmodels.MediaItem.objects.filter(jwp__key=v2.key).first()\n self.assertIsNone(i2)",
"def __ext_embed_id(self, youtube_url):\n youtube_id_match = re.search(r'(?<=v=)[^&#]+', youtube_url)\n youtube_id_match = youtube_id_match or re.search(\n r'(?<=be/)[^&#]+', youtube_url)\n trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match\n else None)\n return trailer_youtube_id",
"def test_empty_media(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['job']['interface']['inputs']['files'][0]['mediaTypes'] = []\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n del manifest['job']['interface']['inputs']['files'][0]['mediaTypes']\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})",
"def delete(self, mediaId):\n headers = { 'Authorization' : self.client.authorization_header }\n\n response = requests.delete(\n self.client.url + '/media/' + mediaId,\n headers = headers\n )\n\n #print 'Response: ' + response.text\n return json.loads(response.text)",
"def _get_video_data(youtube_data, playlist=None):\n def get_category(categories):\n for category in categories:\n if category['scheme'].endswith('categories.cat'):\n return category['$t'] # TODO: map category\n media = youtube_data['media$group']\n video = Video(\n source_videoid=media['yt$videoid']['$t'],\n source_listid=playlist,\n source_username=media['media$credit'][0]['$t'],\n date_published=_parse_datetime(youtube_data['published']['$t']),\n title=youtube_data['title']['$t'],\n duration=int(media['yt$duration']['seconds']) if 'yt$duration' in media else -1,\n )\n video.source_category = get_category(media.get('media$category', []))\n video.source_view_count = int(youtube_data['yt$statistics']['viewCount']) if 'yt$statistics' in youtube_data else -1\n video.source_date_uploaded = media['yt$uploaded']['$t']\n access_control = dict(\n (i['action'], i['permission'] == 'allowed')\n for i in youtube_data.get('yt$accessControl', []))\n video.restricted = access_control.get('embed') is False\n if 'app$control' in youtube_data:\n if 'yt$incomplete' in youtube_data['app$control']:\n video.restricted = True\n else:\n state = youtube_data['app$control']['yt$state']\n if state['name'] == 'restricted':\n if state['reasonCode'] == 'limitedSyndication':\n # see https://groups.google.com/d/msg/youtube-api-gdata/on504fCOEk0/oErUbCptWu4J\n video.restricted = not any(c.get('yt$format') == 5 for c in\n media.get('media$content', []))\n else:\n video.restricted = True\n for thumbnail in media.get('media$thumbnail', []):\n if 'time' not in thumbnail:\n video.thumbnails.append(\n VideoThumbnail(\n url=thumbnail['url'],\n width=thumbnail['width'],\n height=thumbnail['height']))\n for restriction in media.get('media$restriction', []):\n if restriction['type'] == 'country':\n video.restrictions.extend(\n VideoRestriction(\n relationship=restriction['relationship'],\n country=country) for country in restriction['$t'].split())\n return video",
"def clear_subs_content(self):\r\n for youtube_id in self.get_youtube_ids().values():\r\n filename = 'subs_{0}.srt.sjson'.format(youtube_id)\r\n content_location = StaticContent.compute_location(self.course.id, filename)\r\n try:\r\n content = contentstore().find(content_location)\r\n contentstore().delete(content.get_id())\r\n except NotFoundError:\r\n pass",
"def test_recreate_deleted_item(self):\n v1 = make_video(media_id='1234', title='testing')\n set_resources_and_sync([v1])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n self.assertEqual(i1.title, 'testing')\n i1.delete()\n\n set_resources_and_sync([v1])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n self.assertEqual(i1.title, 'testing')",
"def clean():\n\n tracks = []\n removed_playlists = 0\n for playlist in PlaylistManager.find():\n\n if len(playlist.tracks) == 0:\n PlaylistManager.remove(playlist.id)\n removed_playlists += 1\n else:\n tracks += playlist.tracks\n\n tracks = list(set(tracks))\n removed_tracks = 0\n for track in TrackManager.find():\n if track.id not in tracks:\n TrackManager.remove(track.id)\n removed_tracks += 1\n\n click.secho(\"Cleanup removed:\", bold=True)\n click.secho(\n tabulate( # type: ignore\n [\n (magenta(\"Tracks:\"), removed_tracks),\n (magenta(\"Playlists:\"), removed_playlists),\n ],\n tablefmt=\"plain\",\n colalign=(\"right\", \"left\"),\n )\n )",
"def test_api_video_delete_by_playlist_admin(self):\n user = factories.UserFactory()\n playlist = factories.PlaylistFactory()\n factories.PlaylistAccessFactory(\n role=models.ADMINISTRATOR, playlist=playlist, user=user\n )\n video = factories.VideoFactory(playlist=playlist)\n\n jwt_token = AccessToken()\n jwt_token.payload[\"resource_id\"] = str(user.id)\n jwt_token.payload[\"user\"] = {\n \"id\": str(user.id),\n \"username\": user.username,\n }\n\n self.assertEqual(models.Video.objects.count(), 1)\n\n response = self.client.delete(\n f\"/api/videos/{video.id}/\",\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n )\n\n self.assertEqual(models.Video.objects.count(), 0)\n self.assertEqual(response.status_code, 204)",
"def __del__(self):\n if self.video:\n self.video.release()",
"def _render_no_tracking(self, video_id):\n you_tube_url = (\n 'https://www.youtube.com/embed/%s'\n '?feature=player_embedded&rel=0') % video_id\n iframe = cElementTree.XML(\"\"\"\n<div class=\"gcb-video-container\">\n <iframe class=\"youtube-player\" title=\"YouTube Video Player\"\n type=\"text/html\" frameborder=\"0\" allowfullscreen=\"allowfullscreen\">\n </iframe>\n</div>\"\"\")\n iframe[0].set('src', you_tube_url)\n return iframe"
] | [
"0.7022397",
"0.66042817",
"0.61575353",
"0.5879848",
"0.56859034",
"0.56468034",
"0.5629039",
"0.5511188",
"0.5497184",
"0.54881966",
"0.54724157",
"0.5448994",
"0.5418817",
"0.541001",
"0.54045886",
"0.53989905",
"0.5339894",
"0.5189437",
"0.51847184",
"0.51762545",
"0.5173873",
"0.5163507",
"0.51614904",
"0.51211625",
"0.5115929",
"0.5114129",
"0.5113201",
"0.5089893",
"0.50760436",
"0.5073908"
] | 0.8092883 | 0 |
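The YouTube-only test above asserts presence, not format. Several of the negatives (e.g. `clean_video_id`) validate IDs against the remote API; a cheaper, offline alternative is a shape check like the one below. This is an illustrative add-on, not behavior the tests require, and the eleven-character alphabet is an observed convention rather than a documented guarantee:

```python
import re

from django.core.exceptions import ValidationError

# YouTube video IDs observed in the wild: 11 chars of A-Z, a-z, 0-9, '-', '_'.
YOUTUBE_ID_RE = re.compile(r"^[A-Za-z0-9_-]{11}$")


def validate_youtube_id(value: str) -> None:
    """Field validator rejecting strings that do not look like a video ID."""
    if not YOUTUBE_ID_RE.match(value):
        raise ValidationError(f"{value!r} does not look like a YouTube video ID.")
```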
If a media resource has an image, its type property should indicate it's an image. | def test_type_image(image):
resource = models.MediaResource(image=image)
assert resource.type == models.MediaResource.TYPE_IMAGE | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_image(content_type):\n return content_type == \"image/jpeg\" or content_type == \"image/png\"",
"def is_image(self):\r\n # we can only get this if we have headers\r\n LOG.debug('content type')\r\n LOG.debug(self.content_type)\r\n if (self.content_type is not None and\r\n self.content_type.lower() in IMAGE_TYPES.values()):\r\n return True\r\n else:\r\n return False",
"def has_media(self):\r\n if self.image:\r\n return True\r\n return False",
"def IsImage(self, filename):\n mimetype = mimetypes.guess_type(filename)[0]\n if not mimetype:\n return False\n return mimetype.startswith(\"image/\")",
"def IsImage(self, filename):\r\n mimetype = mimetypes.guess_type(filename)[0]\r\n if not mimetype:\r\n return False\r\n return mimetype.startswith(\"image/\")",
"def picture(result):\n media = result.entities.get('media')\n if media:\n return media[0].get('type') == u'photo'\n return False",
"def image_media_type(name):\n return name.endswith(('.png', '.jpg', '.jpeg', '.gif', '.tiff', '.tif',\n '.svg'))",
"def test_badge_should_have_image(self):\n\n badge = self.get_sample_badge()\n # It's a string, even though it often looks like a URL\n self.assertIsInstance(badge.image, str)",
"def is_url_image(image_url):\n image_formats = (\"image/png\", \"image/jpeg\", \"image/jpg\")\n r = requests.head(image_url)\n logger.info(f'{image_url} has content type {r.headers[\"content-type\"]}')\n if r.headers[\"content-type\"] in image_formats:\n return True\n return False",
"def image():\n\n headers = get_headers()\n if \"accept\" not in headers:\n return image_png() # Default media type to png\n\n accept = headers[\"accept\"].lower()\n\n if \"image/webp\" in accept:\n return image_webp()\n elif \"image/svg+xml\" in accept:\n return image_svg()\n elif \"image/jpeg\" in accept:\n return image_jpeg()\n elif \"image/png\" in accept or \"image/*\" in accept or \"*/*\" in accept:\n return image_png()\n else:\n return status_code(406) # Unsupported media type",
"def test_restrict_mediatype():\r\n counter = image_hook_counter()\r\n ADDINS = [feed_image_restrict_mediatypes(('image/png', 'image/gif')), counter]\r\n\r\n class TestFeedImage(feedev.File):\r\n content = \"\"\r\n def headers(p):\r\n if p == 1: return {'Content-Type': 'text/plain'}\r\n elif p == 2: return {'Content-Type': 'image/jpeg'}\r\n elif p == 3: return {'Content-Type': 'image/png; charset=ISO-8859-1'} # charsets are correctly parsed out\r\n elif p == 4: return {'Content-Type': 'image/png'}\r\n\r\n class TestFeed(feedev.Feed):\r\n content = FeedWithImage % (TestFeedImage.url)\r\n\r\n def pass1(feed):\r\n assert counter.success == 0\r\n def pass2(feed):\r\n assert counter.success == 0\r\n def pass3(feed):\r\n assert counter.success == 1\r\n def pass4(feed):\r\n assert counter.success == 2\r\n\r\n feedev.testcaller()",
"def isImage(imgref):\n if (imgref.endswith(\"JPG\")):\n return True\n if (imgref.endswith(\"jpg\")):\n return True\n if (imgref.endswith(\"gif\")):\n return True\n if (imgref.endswith(\"png\")):\n return True\n return False",
"def has_image(self):\n return hasattr(self, \"_image\") and self._image is not None",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")",
"def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")"
] | [
"0.71367157",
"0.6835922",
"0.6663885",
"0.6654667",
"0.6619548",
"0.648332",
"0.6373138",
"0.6143732",
"0.6071515",
"0.59954345",
"0.5967518",
"0.596137",
"0.59485954",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796",
"0.5918796"
] | 0.7742908 | 0 |
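This record and the truncated one that follows together specify a `type` property reporting which kind of media the resource wraps. The constant names (`TYPE_IMAGE`, `TYPE_YOUTUBE`) come from the tests; the constant values and the precedence below are assumptions. A sketch consistent with both tests:

```python
from django.db import models


class MediaResource(models.Model):
    TYPE_IMAGE = "image"
    TYPE_YOUTUBE = "youtube"

    image = models.ImageField(blank=True)
    youtube_id = models.CharField(blank=True, max_length=32)

    @property
    def type(self):
        """Report which kind of media this resource wraps."""
        # clean() guarantees at most one field is set, so the order is arbitrary.
        return self.TYPE_YOUTUBE if self.youtube_id else self.TYPE_IMAGE
```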
If a media resource has a YouTube video ID, its type property should indicate it's a YouTube video. | def test_type_youtube():
resource = models.MediaResource(youtube_id="dQw4w9WgXcQ")
assert resource.type == models.MediaResource.TYPE_YOUTUBE | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def play_youtube(self, media_id):\n pass",
"def isYouTube(self):\n if 'youtube' in self.link.split('.'):\n return True\n return None",
"def play_youtube(self, media_id):\n raise NotImplementedError()",
"def on_play(self, event, type=\"yt\", content=None):\n urls = {\n \"https://www.youtube.com/watch?v=\": \"yt\",\n \"https://youtube.com/watch?v=\": \"yt\",\n \"https://youtu.be\": \"yt\",\n \"https://soundcloud.com\": \"sc\",\n } # /watch?v= /watch?v=\n search_prefixs = {\n \"youtube\": \"ytsearch:{}\",\n \"yt\": \"ytsearch:{}\",\n \"soundcloud\": \"scsearch:{}\",\n \"sc\": \"scsearch:{}\",\n }\n self.pre_check(event)\n if event.guild.id not in self.cool_down:\n self.cool_down[event.guild.id] = {}\n if (event.author.id not in self.cool_down[\"general\"] or\n time() - self.cool_down[\"general\"][event.author.id] >= 1):\n if (event.guild.id not in self.cool_down[\"playlist\"] or\n not self.cool_down[\"playlist\"][event.guild.id]):\n self.cool_down[\"general\"][event.author.id] = time()\n if event.guild.get_member(event.author).get_voice_state():\n self.on_join(event)\n self.same_channel_check(event)\n if type not in search_prefixs.keys():\n if type == \"override\":\n if event.author.id not in self.owners:\n return api_loop(\n event.channel.send_message,\n \"You don't own me\",\n )\n video_url = content\n url_found = True\n pass\n elif content is not None:\n content = \"{} {}\".format(type, content)\n type = \"yt\"\n else:\n content = type\n type = \"yt\"\n elif type in search_prefixs.keys() and content is None:\n return api_loop(\n event.channel.send_message,\n \"Search (content) argument missing.\",\n )\n if \"url_found\" not in locals():\n url_found = False\n for url, index in urls.items():\n if url in content:\n url_found = True\n video_url = content\n type = index\n if not url_found:\n if type in search_prefixs:\n video_url = search_prefixs[type].format(content)\n else:\n video_url = search_prefixs[\"yt\"].format(content)\n youtubedl_object = YoutubeDLInput(video_url, command=\"ffmpeg\")\n try:\n yt_data = self.get_ytdl_values(youtubedl_object.info)\n except DownloadError as e:\n return api_loop(\n event.channel.send_message,\n \"Video not avaliable: {}\".format(e),\n )\n if yt_data[\"is_live\"]:\n return api_loop(\n event.channel.send_message,\n \"Livestreams aren't supported\",\n )\n elif yt_data[\"duration\"] > 3620:\n return api_loop(\n event.channel.send_message,\n \"The maximum supported length is 1 hour.\",\n )\n self.get_player(event.guild.id).append(youtubedl_object)\n api_loop(\n event.channel.send_message,\n \"Added ``{}`` by ``{}`` using ``{}``.\".format(\n yt_data[\"title\"],\n yt_data[\"uploader\"],\n yt_data[\"source\"],\n ),\n )\n else:\n api_loop(\n event.channel.send_message,\n \"Currently adding playlist, please wait.\",\n )\n else:\n cool = round(\n Decimal(\n 1 - (time() - self.cool_down[\"general\"][event.author.id]),\n ),\n )\n api_loop(\n event.channel.send_message,\n \"Cool down: {} seconds left.\".format(cool),\n )",
"def testVideoTrackType(self):\n\n trackLine = _buildTrackLine(0, 'video', {'hello': 'goodbye'})\n\n trackID, trackType, trackDict = tools._trackInfo(trackLine)\n\n self.assertEqual(\n 'video',\n trackType,\n )",
"def testVideoTrackType(self):\n\n trackLine = _buildTrackLine(967, 'subtitles', {'hello': 'goodbye'})\n\n trackID, trackType, trackDict = tools._trackInfo(trackLine)\n\n self.assertEqual(\n 'subtitles',\n trackType,\n )",
"def youtube(self):\n return self._youtube",
"def _get_video_data(youtube_data, playlist=None):\n def get_category(categories):\n for category in categories:\n if category['scheme'].endswith('categories.cat'):\n return category['$t'] # TODO: map category\n media = youtube_data['media$group']\n video = Video(\n source_videoid=media['yt$videoid']['$t'],\n source_listid=playlist,\n source_username=media['media$credit'][0]['$t'],\n date_published=_parse_datetime(youtube_data['published']['$t']),\n title=youtube_data['title']['$t'],\n duration=int(media['yt$duration']['seconds']) if 'yt$duration' in media else -1,\n )\n video.source_category = get_category(media.get('media$category', []))\n video.source_view_count = int(youtube_data['yt$statistics']['viewCount']) if 'yt$statistics' in youtube_data else -1\n video.source_date_uploaded = media['yt$uploaded']['$t']\n access_control = dict(\n (i['action'], i['permission'] == 'allowed')\n for i in youtube_data.get('yt$accessControl', []))\n video.restricted = access_control.get('embed') is False\n if 'app$control' in youtube_data:\n if 'yt$incomplete' in youtube_data['app$control']:\n video.restricted = True\n else:\n state = youtube_data['app$control']['yt$state']\n if state['name'] == 'restricted':\n if state['reasonCode'] == 'limitedSyndication':\n # see https://groups.google.com/d/msg/youtube-api-gdata/on504fCOEk0/oErUbCptWu4J\n video.restricted = not any(c.get('yt$format') == 5 for c in\n media.get('media$content', []))\n else:\n video.restricted = True\n for thumbnail in media.get('media$thumbnail', []):\n if 'time' not in thumbnail:\n video.thumbnails.append(\n VideoThumbnail(\n url=thumbnail['url'],\n width=thumbnail['width'],\n height=thumbnail['height']))\n for restriction in media.get('media$restriction', []):\n if restriction['type'] == 'country':\n video.restrictions.extend(\n VideoRestriction(\n relationship=restriction['relationship'],\n country=country) for country in restriction['$t'].split())\n return video",
"def validate_youtube(fragment):\n request=urllib.urlopen('https://www.youtube.com/watch?v=' + fragment)\n return request.getcode() == 200",
"def getMedia(media_type, media_id):\n\n mediaURL = BASE_URL + media_type + \"/\" + str(media_id) + API_KEY\n videoURL = BASE_URL + media_type + \"/\" + str(media_id) + '/videos' + API_KEY\n\n # get the data from the API\n headers = {'Accept': 'application/json'}\n media_request = requests.get(mediaURL, headers=headers)\n video_request = requests.get(videoURL, headers=headers)\n\n # parse to json array\n media_response = media_request.json()\n video_response = video_request.json()\n\n # pull out desired attributes from json data\n data = {\n 'poster': 'http://image.tmdb.org/t/p/w500' + media_response[\"poster_path\"],\n 'title': media_response[\"title\"],\n 'storyline': media_response[\"overview\"],\n 'trailer': 'https://www.youtube.com/watch?v=' + video_response[\"results\"][0][\"key\"]\n }\n\n return data",
"def allow_video(self, video_id):\n print(\"allow_video needs implementation\")",
"def allow_video(self, video_id):\n print(\"allow_video needs implementation\")",
"def allow_video(self, video_id):\n print(\"allow_video needs implementation\")",
"def allow_video(self, video_id):\n print(\"allow_video needs implementation\")",
"def get_video(self):\n if self.parsing_template.video and self.parsing_template.video in self.headline.url:\n return True\n return False",
"def _get_activity_type(self, video):\n\n activity_type = \"https://w3id.org/xapi/video/activity-type/video\"\n\n # When the video is a live we change the activity to webinar\n if video.is_live:\n activity_type = \"http://id.tincanapi.com/activitytype/webinar\"\n\n return activity_type",
"def is_video(self):\n val = False\n if self.__dict__['codec_type']:\n if self.__dict__['codec_type'] == 'video':\n val = True\n return val",
"def __ext_embed_id(self, youtube_url):\n youtube_id_match = re.search(r'(?<=v=)[^&#]+', youtube_url)\n youtube_id_match = youtube_id_match or re.search(\n r'(?<=be/)[^&#]+', youtube_url)\n trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match\n else None)\n return trailer_youtube_id",
"def test_get_video_id_from_url(self):\n self.assertEqual(get_video_id_from_url('https://www.youtube.com/embed/DqGwxR_0d1M'),\n 'DqGwxR_0d1M')\n self.assertEqual(get_video_id_from_url('https://youtu.be/DqGwxR_0d1M'), 'DqGwxR_0d1M')\n self.assertEqual(\n get_video_id_from_url('https://www.youtube.com/watch?v=DqGwxR_0d1M&feature=youtu.be'),\n 'DqGwxR_0d1M')\n self.assertEqual(get_video_id_from_url('https://www.youtube.com/watch?v=DqGwxR_0d1M'),\n 'DqGwxR_0d1M')",
"def test_clean_only_youtube_id():\n resource = models.MediaResource(youtube_id=\"dQw4w9WgXcQ\")\n\n resource.clean()",
"def google_youtube_check(id):\n\tif not API_KEY:\n\t\traise ConfigException(\"Require API_KEY for googleapi. Reload after setting.\")\n\td = {\"id\" : quote(id.encode(\"utf-8\")), \"part\" : \"id,status\", \"key\" : API_KEY}\n\t\n\tf = urlopen(YOUTUBE_INFO_URL % (urlencode(d)))\n\tytdata = load(f)\n\tif not ytdata.get(\"items\"): # if there are no items for the ID search, return False\n\t\treturn False\n\treturn True",
"def test_list_media_type(self):\n\n # check if documentalist has access to list media-types\n self.login_documentalist()\n response = self.client.get('/multimedia/media-types/' )\n\n # 403 = unauthorized\n self.assertEqual(response.status_code, 403)\n\n self.client.logout()\n self.login_admin()\n\n response = self.client.get('/multimedia/media-types/')\n self.assertContains(response, \"Video\")",
"def filter_yt(info: interceptor.Request):\n\turl = info.request_url\n\tif (url.host() == 'www.youtube.com' and\n\t\t\turl.path() == '/get_video_info' and\n\t\t\t'&adformat=' in url.query()):\n\t\tinfo.block()",
"def get_embed_video_model_string():\n return getattr(settings, \"WAGTAILEMBEDVIDEOS_EMBEDVIDEO_MODEL\", \"wagtail_embed_videos.EmbedVideo\")",
"def __init__(self, url, params=None):\n super(YoutubeVideo, self).__init__(url, params)\n self.video_id = self.get_video_id()",
"def test_note_related_with_work_order_has_video_attachment(fake_note_with_video_attachment):\n\n note_id_value: str = str(uuid.uuid4())\n a_note = Note.from_dict(note_id_value, fake_note_with_video_attachment)\n assert a_note.has_video_attachment()\n assert a_note.is_document\n assert \"video\" in a_note.mime_type\n assert a_note.object_type == \"msdyn_workorder\"",
"def compose_embed_youtube(video_id = None):\n assert(video_id != None)\n return \"http://www.youtube.com/embed/{0}?enablejsapi=1&wmode=opaque\".format(\n video_id\n )",
"def media_content_type(self):\n return MEDIA_TYPE_MUSIC\n # return MEDIA_TYPE_PLAYLIST",
"def has_video(self):\n return self.__video_format is not None",
"def get_video_id(self):\n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.youtube_fix_url(self.original_url))\n if p.path == '/watch':\n # Url of type http://www.youtube.com/watch?v=KRaeHxwZvms&feature=g-u-u&context=G2b00124FUAAAAAAAAAA\n #logger.debug('is a watch')\n params = cgi.parse_qs(p.query)\n if 'v' in params:\n return params['v'][0]\n elif p.fragment.startswith('/watch?v='):\n # sample. http://m.youtube.com/#/watch?v=ZXkW1-HdRC8\n params = cgi.parse_qs(p.fragment)\n if '/watch?v' in params:\n return params['/watch?v'][0]\n elif p.path.startswith('/v/') or p.path.startswith('/embed/'):\n path = p.path.split('/')\n return path[-1]\n elif p.netloc == 'youtu.be':\n return p.path[1:]\n elif re.match('(.{1}/){3}([\\w+-_^/]+)', p.fragment):\n parts = p.fragment.split('/')\n return parts[-1]\n return ''"
] | [
"0.6823963",
"0.6766844",
"0.6746436",
"0.6112856",
"0.60233295",
"0.59815586",
"0.5897715",
"0.58677566",
"0.5842102",
"0.58315057",
"0.580741",
"0.580741",
"0.580741",
"0.580741",
"0.58059746",
"0.579559",
"0.579133",
"0.5763148",
"0.57312804",
"0.57054985",
"0.56609565",
"0.56567675",
"0.56507206",
"0.5647443",
"0.56471074",
"0.56462425",
"0.5623117",
"0.56217027",
"0.5602246",
"0.5602242"
] | 0.78130394 | 0 |
Extract text and other things from the raw_html for this document. | def extract(self, doc, raw_html):
    super(KenyaTodayCrawler, self).extract(doc, raw_html)
    soup = BeautifulSoup(raw_html)

    # gather title
    doc.title = soup.find(attrs={"property": "og:title"})['content']

    # gather publish date
    date = self.extract_plaintext(soup.select("main.content .entry-meta .entry-time"))
    doc.published_at = self.parse_timestamp(date)

    nodes = soup.select(".content .entry-content p")
    self.log.info(nodes)
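    # use the first paragraph as the summary and the remaining paragraphs as the body text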
    if len(nodes) > 1:
        doc.summary = self.extract_plaintext(nodes[0:1])
        doc.text = "\n\n".join(p.text.strip() for p in nodes[2:])

    doc.author = Author.unknown() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extract_all_text(self, url, html_doc):\n self.title_text = self.get_title_words(html_doc)\n self.meta_text = self.get_meta_words(html_doc)\n self.url_text = self.get_url_words(url)\n self.heading_text = self.get_heading_words(html_doc)\n self.body_text = self.get_body_words(html_doc)",
"def get_text_hook(raw):\n soup = bs4.BeautifulSoup(quopri.decodestring(raw), features=\"lxml\")\n return soup.text",
"def _get_text(raw_html):\n bs = BeautifulSoup(raw_html)\n text_nodes = bs.find_all(_is_text_tag)\n text_elements = [_get_child_text(node) for node in text_nodes]\n return ' '.join(chain(*chain(*text_elements)))",
"def process_html(raw_html_text):\n\tbounds_list = pre_proc.get_page_bounds(raw_html_text)\n\n\tprocessed_text_html = ( pre_proc.split_spans(raw_html_text) \t| p(pre_proc.delete_non_textual_elements)\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t| p(pre_proc.delete_headers, bounds_list)\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t| p(pre_proc.delete_vertical_text)\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t| p(pre_proc.sort_html)\n\t\t)\n\treturn processed_text_html",
"def extract(content):\n parser = MyHTMLParser()\n parser.textBody = []\n parser.feed(content)\n textBody = parser.textBody\n textBody = \" \".join(textBody)\n textBody = textBody.replace('\\xa0', \" \")\n return textBody.strip()",
"def parseSearchHtml(self):\n pass",
"def parseSearchHtml(self):\n pass",
"def extract_raw_text(soup, url):\n \n title_class = \"nom-notice\"\n title = soup.find(class_=title_class)\n raw_infos = {}\n raw_infos['name'] = title.contents[0].replace(u'\\xa0', ' ')\n \n notice = soup.find(class_=\"notice\")\n \n summary = notice.find(class_=\"chapo\")\n if summary is not None:\n first_para = summary.find_all('p', recursive=False)[-1]\n first_para.tag = 'div'\n first_para['class'] = 'summary'\n raw_infos['summary'] = unicode(first_para)\n \n else:\n raw_infos['summary'] = unicode('')\n\n article = notice.find(class_='texte')\n if article is not None:\n article['class'] = 'article'\n raw_infos['article'] = unicode(article)\n \n sources = notice.find(class_='sources')\n raw_infos['sources'] = unicode(sources)\n \n works = notice.find(class_='oeuvres')\n if works is not None:\n works['class'] = 'works'\n raw_infos['works'] = unicode(works)\n \n # In function that writes, encode everything to bytes! .encode('utf-8')\n return raw_infos",
"def extract_page_text(html):\n soup = bs4.BeautifulSoup(html)\n\n # Remove <script/> and <style/> content\n for script in soup([\"script\", \"style\"]):\n script.extract()\n\n text = soup.get_text()\n\n # Strip leading and trailing whitespace from each line, then join all the\n # non-empty lines together.\n lines = (line.strip() for line in text.splitlines())\n text = '\\n'.join(line for line in lines if line)\n\n return text",
"def raw_text(self):\n\t\t\n\t\t #eliminating more headers\n\t\traw_text = re.sub(r\".*OPERATIONS O[PF].*\",r\"\",self.doc)\n\t\traw_text = re.sub(r\"Page \\d+\",r\"\",raw_text)\n\t\traw_text = re.sub(r\".*B[lL]OCK.*\",r\"\",raw_text)\n\t\traw_text = re.sub(r\".*WEST GULF.*\",r\"\",raw_text)\n\t\traw_text = re.sub(r\".*NAVAL FORCES ON.*\",r\"\",raw_text)\n\t\traw_text = re.sub(r\"\\s\",r\" \", raw_text) #eliminating tabs etc. \t \t \n\t\treturn raw_text",
"def _get_text(self, remove_newlines=True):\n if not self.text:\n url = self.url\n try:\n self.log.debug(\"Try to get content from page {}\".format(url))\n r = requests.get(url)\n except requests.exceptions.RequestException as e:\n self.log.warn(\"Unable to get page content of the url: {url}. \"\n \"The reason: {exc!r}\".format(url=url, exc=e))\n raise ParsingError(e.strerror)\n\n ud = UnicodeDammit(r.content, is_html=True)\n\n enc = ud.original_encoding.lower()\n declared_enc = ud.declared_html_encoding\n if declared_enc:\n declared_enc = declared_enc.lower()\n # possible misregocnition of an encoding\n if (declared_enc and enc != declared_enc):\n detect_dict = chardet.detect(r.content)\n det_conf = detect_dict[\"confidence\"]\n det_enc = detect_dict[\"encoding\"].lower()\n if enc == det_enc and det_conf < THRESHOLD_OF_CHARDETECT:\n enc = declared_enc\n # if page contains any characters that differ from the main\n # encoding we will ignore them\n content = r.content.decode(enc, \"ignore\").encode(enc)\n htmlparser = etree.HTMLParser(encoding=enc)\n root = etree.HTML(content, parser=htmlparser)\n etree.strip_elements(root, html.etree.Comment, \"script\", \"style\")\n text = html.tostring(root, method=\"text\", encoding=\"unicode\")\n\n if remove_newlines:\n self.log.debug(str(type(text)))\n text = re.sub('\\s+', ' ', text)\n self.text = text\n\n return self.text",
"def extract_page_text(self, bs_object):\n\n # kill all script and style elements\n for script in bs_object([\"script\", \"style\", \"head\"]):\n script.extract() # rip it out\n\n # get text\n text = bs_object.get_text()\n\n # break into lines and remove leading and trailing space on each\n lines = (line.strip() for line in text.splitlines())\n # break multi-headlines into a line each\n chunks = (phrase.strip() for line in lines for phrase in line.split(\" \"))\n # drop blank lines\n text_list_gen = (chunk for chunk in chunks if chunk)\n text_list = list(text_list_gen)\n # print \"TEXT LIST >>>\\n\", text_list\n \n return text_list",
"def parseHtmlText(text):\n # text processing\n raw = BeautifulSoup(text.text, 'html.parser').get_text()\n nltk.data.path.append('./nltk_data/') # set the path\n tokens = nltk.word_tokenize(raw)\n text = nltk.Text(tokens)\n # remove punctuation, count raw words\n nonPunct = re.compile('.*[A-Za-z].*')\n raw_words = [w for w in text if nonPunct.match(w)]\n raw_word_count = Counter(raw_words)\n # stop words\n no_stop_words = [w for w in raw_words if w.lower() not in stops]\n no_stop_words_count = Counter(no_stop_words)\n return raw_word_count, no_stop_words_count",
"def extractText(postSoup):\n for tag in postSoup.findAll(True):\n if tag.name in (\"code\"):\n tag.extract()\n else:\n tag.hidden=True\n\n return postSoup.renderContents()",
"def process_doc_html(self, doc_in):\n self.feed(doc_in) #SGMLParser call\n self.close() #SGMLParser call\n self.hand_off_temp_pieces('to_doc_pieces')\n self.all_pieces = self.all_pieces[:-16] # drop </body></html>\n return self.all_pieces",
"def extractText(text):\n soup = BeautifulSoup(text, 'html.parser')\n for code in soup.find_all('code'):\n code.decompose()\n return soup.get_text()",
"def _extract_html(self, url):\n self.response = requests.get(url, timeout=5)\n self.html = BeautifulSoup(self.response.content, \"lxml\") if self.response.ok else None\n # return self.html",
"def extractText(html_code):\n html_tree = html.fromstring(html_code)\n chapter_list = html_tree.find_class(\"chapter\")\n chapter_text = chapter_list[0].text_content()\n return chapter_text",
"def _html(self, text):\r\n html = URL_REGEX.sub(self._parse_urls, text)\r\n html = USERNAME_REGEX.sub(self._parse_users, html)\r\n html = LIST_REGEX.sub(self._parse_lists, html)\r\n return HASHTAG_REGEX.sub(self._parse_tags, html)",
"def extract_text(soup, result):\n if soup:\n for t in soup.children:\n if type(t) == NavigableString:\n # Text content node\n result.append(t)\n elif isinstance(t, NavigableString):\n # Comment, CDATA or other text data: ignore\n pass\n elif t.name in whitespace_tags:\n # Tags that we interpret as whitespace, such as <br> and <img>\n result.append_whitespace()\n elif t.name in block_tags:\n # Nested block tag\n result.begin() # Begin block\n extract_text(t, result)\n result.end() # End block\n elif t.name not in exclude_tags:\n # Non-block tag\n extract_text(t, result)",
"def HTMLparser(self):\n soup = self.getHTML()\n \n # Sort through all the text in the html:\n for text in soup.find_all('p'):\n try:\n paragraphNo = int(text.parent.p['id'][14:])\n \n # Only grab paragraphs in \"On the Social Contract\"\n if paragraphNo < self.START_PARAGRAPH or paragraphNo > self.END_PARAGRAPH:\n continue\n \n elif text.string:\n \n # Ignore those \"paragraphs\" in the html that simply outline different chapters/books\n if re.search('^(CHAPTER|BOOK)(.*):', text.string):\n continue\n \n else:\n \n # Want to read in the document by sentence (for RousseauBot to use individually later on)\n tempList = re.split('(?<!etc)\\.\\s(?!.*\\\")|\\!', text.string)\n for sentence in tempList:\n \n # When a \"paragraph\" is just a single sentence, re's .split() returns the sentence and a ''\n # Also, remove overly long quotes - Twitter has char limit\n if sentence != '' and len(sentence.strip()) < self.TWITTER_LIMIT:\n self.quotes.append(sentence.strip())\n \n except KeyError:\n \n # BS throws KeyError when <p>'s id field is blank; ignore - all paragraphs I need has an id\n continue",
"def parsed_html():\n return utils.parse_html(\n \"\"\"\n <!doctype hmtl>\n <html>\n <head>\n <meta charset=\"utf-8\">\n <meta name=\"viewport\" content=\"width=device-width\">\n <title>Page title</title>\n <link rel=\"stylesheet\" href=\"/static/styles.css\" />\n </head>\n <body>\n <h1>Django Auto AMP</h1>\n <p>Generate automatic AMP from your Django templates</p>\n <img src=\"/static/img.jpg\" width=\"500\" height=\"300\" />\n <img src=\"/static/img.gif\" layout=\"nodisplay\" />\n <img src=\"/static/img.png\" />\n <script type=\"text/javascript\" src=\"/static/scripts.js\" />\n <script type=\"application/json\" src=\"/static/data.json\" />\n </body>\n </html>\n \"\"\"\n )",
"def extract_text(html, guess_punct_space=True):\n sel = cleaned_selector(html)\n return selector_to_text(sel, guess_punct_space=guess_punct_space)",
"def _html_text(self, html):\n ee = None\n try: return html.html_text()\n except Exception, e: ee = e; pass\n try: return html.xml_text()\n except Exception, e: print \"HtmlDocument/text\", ee, e; pass\n try: return str(html)\n except Exception, e: print \"HtmlDocument/text\", e; return \" \"",
"def _strip_excerpt(self, raw_html):\n clean_regex = re.compile(\"<.*?>\")\n clean_text = re.sub(clean_regex, \"\", raw_html)\n return html.unescape(clean_text).replace(\"\\n\", \"\")",
"def ExtractText(self, zhtmlstring):\n # Not defined in init due to Python2/Python3 complications.\n # pylint: disable=attribute-defined-outside-init\n self._text = []\n self.feed(zhtmlstring)\n self.close()\n self._text = [line.strip() for line in self._text]\n return ' '.join(self._text)",
"def convert_content(self, html):\n\n try:\n dom = BeautifulSoup(html, 'html.parser')\n return self.parse_content(dom)\n except:\n return html",
"def from_html(self, content):\r\n pass",
"def parse_source(html, encoding='utf-8'):\n return BeautifulSoup(html, from_encoding=encoding)",
"def html_to_text(html):\n s = TextExtractorHTMLParser()\n s.feed(html)\n return s.get_text()"
] | [
"0.6755422",
"0.6671859",
"0.6553396",
"0.64243054",
"0.6272383",
"0.623531",
"0.623531",
"0.6228035",
"0.61742455",
"0.61664754",
"0.61556983",
"0.6104006",
"0.6092044",
"0.60168433",
"0.59852856",
"0.59402233",
"0.5923603",
"0.59181535",
"0.59116304",
"0.58720744",
"0.5800878",
"0.5799446",
"0.57864213",
"0.5786221",
"0.5746921",
"0.5743207",
"0.5730884",
"0.5718094",
"0.57174754",
"0.5703741"
] | 0.73629075 | 0 |
phase5 requires a 4-edge combo where none of the edges are in the z-plane. phase4 will put a 4-edge combo into that state. There are 12!/(4!8!) = 495 different 4-edge combinations. Try them all and see which one has the lowest phase4 cost. | def find_first_four_edges_to_pair(self):
    original_state = self.state[:]
    original_solution = self.solution[:]
    original_solution_len = len(self.solution)
    results = []
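    # try all C(12, 4) = 12!/(4!8!) = 495 ways of choosing 4 of the 12 edges and
    # record the phase-4 solution length for every combo that phase4 can solve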
    for wing_str_index, wing_str_combo in enumerate(itertools.combinations(wing_strs_all, 4)):
        wing_str_combo = sorted(wing_str_combo)
        self.state = original_state[:]
        self.solution = original_solution[:]
        self.lt_phase4.wing_strs = wing_str_combo

        if self.lt_phase4.solve():
            phase4_solution = self.solution[original_solution_len:]
            phase4_solution_len = len(phase4_solution)
            results.append((phase4_solution_len, wing_str_combo))
            logger.debug(
                f"{wing_str_index+1}/495 {wing_str_combo} phase-4 solution length is {phase4_solution_len}"
            )
        else:
            logger.debug(f"{wing_str_index+1}/495 {wing_str_combo} phase-4 solution length is >= 4 ")

        self.lt_phase4.fh_txt_cache = {}
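    # restore the original cube state and return the combos sorted by phase-4 cost (shortest first)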
    self.state = original_state[:]
    self.solution = original_solution[:]
    results.sort()
    return results | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def phase_5(self):\n test_board_1 = board(5, 5, [1, 1], [0, 0])\n render = Render_engine('terminal', test_board_1)\n\n render.render_terminal(test_board_1)",
"def cost_function_SO4(params: list):\n cost = 0\n SO4 = SO4_circuit(params[0], params[1], params[2], params[3], params[4], params[5])\n\n for i in range(4):\n for j in range(4):\n cost += abs(SO4[i][j] - U[i][j])\n\n # identity_goal = SO4 @ np.linalg.inv(U)\n # for i in range(4):\n # for j in range(4):\n # cost += abs(identity_goal[i][j] - I4[i][j])\n\n return cost",
"def rk4_singleStep(odes, state, parameters, dt):\n k1 = dt * odes(state, parameters)\n k2 = dt * odes(state + 0.5 * k1, parameters)\n k3 = dt * odes(state + 0.5 * k2, parameters)\n k4 = dt * odes(state + k3, parameters)\n return state + (k1 + 2 * k2 + 2 * k3 + k4) / 6",
"def ramp5p(params, phase, args=dict(n=5, guess=[1, -0.32, 2, -0.08, 2])):\n # 2013-12-07 14:08 IJMC: Created.\n\n return params[0] * (1. + np.exp(-params[1]*phase + params[2]) + \\\n np.exp(-params[3]*phase + params[4]))",
"def ex_4pdeer(param): \r\n param = _parsargs(param,npar=1) \r\n \r\n # Dipolar pathways\r\n lam = param[0]\r\n pathways = [\r\n [1-lam],\r\n [lam, 0]\r\n ]\r\n return pathways",
"def test_4_2_5D_cube_splits(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0), (1, 1, 1, 0, 0), (1, 1, 1, 1, 0),\n (1, 1, 1, 0, 1), (1, 1, 0, 1, 0), (1, 1, 0, 1, 1),\n (1, 1, 0, 0, 1), (1, 0, 1, 0, 0), (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1), (1, 0, 0, 0, 1), (0, 1, 0, 0, 0),\n (0, 1, 1, 0, 0), (0, 1, 1, 1, 0), (0, 1, 1, 1, 1),\n (0, 1, 1, 0, 1), (0, 1, 0, 1, 0), (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1), (0, 0, 1, 0, 1), (0, 0, 0, 1, 0),\n (0, 0, 0, 1, 1), (0, 0, 0, 0, 1), (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.5), (0.0, 0.0, 0.0, 0.0, 0.5),\n (0.0, 0.0, 0.0, 0.5, 0.0), (0.0, 0.0, 0.5, 0.0, 0.5),\n (0.0, 0.0, 0.5, 0.0, 0.0), (0.0, 0.0, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.0, 0.5, 0.5), (0.0, 0.5, 0.0, 0.0, 0.5),\n (0.0, 0.5, 0.0, 0.0, 0.0), (0.0, 0.5, 0.0, 0.5, 0.0),\n (0.0, 0.5, 0.5, 0.0, 0.5), (0.0, 0.5, 0.5, 0.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0), (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.5, 0.0, 0.0, 0.5, 0.5), (0.5, 0.0, 0.0, 0.0, 0.5),\n (0.5, 0.0, 0.0, 0.0, 0.0), (0.5, 0.0, 0.0, 0.5, 0.0),\n (0.5, 0.0, 0.5, 0.0, 0.5), (0.5, 0.0, 0.5, 0.0, 0.0),\n (0.5, 0.0, 0.5, 0.5, 0.0), (0.5, 0.5, 0.0, 0.5, 0.5),\n (0.5, 0.5, 0.0, 0.0, 0.5), (0.5, 0.5, 0.0, 0.0, 0.0),\n (0.5, 0.5, 0.0, 0.5, 0.0), (0.5, 0.5, 0.5, 0.0, 0.5),\n (0.5, 0.5, 0.5, 0.0, 0.0), (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.25, 0.25, 0.25, 0.25, 0.25), (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5), (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5), (1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 0.5), (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 1.0, 0.5, 0.5, 1.0), (1.0, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5), (1.0, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0), (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 0.5, 1.0, 1.0), (1.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5), (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5), (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0), (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 1.0), (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5), (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0), (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5), (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0), (0.75, 0.75, 0.75, 0.75, 0.75),\n (1.0, 0.0, 0.5, 0.5, 0.5), (1.0, 0.0, 0.0, 0.5, 0.5),\n (1.0, 0.0, 0.0, 0.0, 0.5), (1.0, 0.0, 0.0, 0.5, 0.0),\n (1.0, 0.0, 0.5, 0.0, 0.5), (1.0, 0.0, 0.5, 0.0, 0.0),\n (1.0, 0.0, 0.5, 0.5, 0.0), (1.0, 0.5, 0.0, 0.5, 0.5),\n (1.0, 0.5, 0.0, 0.0, 0.5), (1.0, 0.5, 0.0, 0.0, 0.0),\n (1.0, 0.5, 0.0, 0.5, 0.0), (1.0, 0.5, 0.5, 0.0, 0.5),\n (1.0, 0.5, 0.5, 0.0, 0.0), (1.0, 0.5, 0.5, 0.5, 0.0),\n (0.75, 0.25, 0.25, 0.25, 0.25), (1.0, 1.0, 0.0, 0.5, 0.5),\n (1.0, 1.0, 0.0, 0.0, 0.5), (1.0, 1.0, 0.0, 0.5, 0.0),\n (1.0, 1.0, 0.5, 0.0, 0.5), (1.0, 1.0, 0.5, 0.0, 0.0),\n (1.0, 1.0, 0.5, 0.5, 0.0), (0.5, 1.0, 0.0, 0.5, 0.5),\n (0.5, 1.0, 0.0, 0.0, 0.5), (0.5, 1.0, 0.0, 0.0, 0.0),\n (0.5, 1.0, 0.0, 0.5, 0.0), (0.5, 1.0, 0.5, 0.0, 0.5),\n (0.5, 1.0, 0.5, 0.0, 0.0), (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.75, 0.75, 0.25, 0.25, 0.25), (1.0, 1.0, 1.0, 0.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.0), (1.0, 0.5, 1.0, 0.0, 0.5),\n (1.0, 0.5, 1.0, 0.0, 0.0), (1.0, 0.5, 1.0, 0.5, 0.0),\n (0.5, 1.0, 1.0, 0.0, 0.5), (0.5, 1.0, 1.0, 0.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0), (0.5, 0.5, 1.0, 0.0, 0.5),\n (0.5, 0.5, 1.0, 0.0, 0.0), (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.75, 0.75, 0.75, 0.25, 0.25), (1.0, 
1.0, 0.5, 1.0, 0.0),\n (1.0, 0.5, 1.0, 1.0, 0.0), (1.0, 0.5, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 1.0, 0.0), (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 0.5, 1.0, 1.0, 0.0), (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.75, 0.75, 0.75, 0.75, 0.25), (1.0, 1.0, 0.5, 0.0, 1.0),\n (1.0, 0.5, 1.0, 0.0, 1.0), (1.0, 0.5, 0.5, 0.0, 1.0),\n (0.5, 1.0, 1.0, 0.0, 1.0), (0.5, 1.0, 0.5, 0.0, 1.0),\n (0.5, 0.5, 1.0, 0.0, 1.0), (0.5, 0.5, 0.5, 0.0, 1.0),\n (0.75, 0.75, 0.75, 0.25, 0.75), (1.0, 1.0, 0.0, 1.0, 0.5),\n (1.0, 0.5, 0.0, 1.0, 0.5), (1.0, 0.5, 0.0, 1.0, 0.0),\n (0.5, 1.0, 0.0, 1.0, 0.5), (0.5, 1.0, 0.0, 1.0, 0.0),\n (0.5, 0.5, 0.0, 1.0, 0.5), (0.5, 0.5, 0.0, 1.0, 0.0),\n (0.75, 0.75, 0.25, 0.75, 0.25), (1.0, 1.0, 0.0, 0.5, 1.0),\n (1.0, 0.5, 0.0, 1.0, 1.0), (1.0, 0.5, 0.0, 0.5, 1.0),\n (0.5, 1.0, 0.0, 1.0, 1.0), (0.5, 1.0, 0.0, 0.5, 1.0),\n (0.5, 0.5, 0.0, 1.0, 1.0), (0.5, 0.5, 0.0, 0.5, 1.0),\n (0.75, 0.75, 0.25, 0.75, 0.75), (1.0, 0.5, 0.0, 0.0, 1.0),\n (0.5, 1.0, 0.0, 0.0, 1.0), (0.5, 0.5, 0.0, 0.0, 1.0),\n (0.75, 0.75, 0.25, 0.25, 0.75), (1.0, 0.0, 1.0, 0.5, 0.5),\n (1.0, 0.0, 1.0, 0.0, 0.5), (1.0, 0.0, 1.0, 0.5, 0.0),\n (0.5, 0.0, 1.0, 0.5, 0.5), (0.5, 0.0, 1.0, 0.0, 0.5),\n (0.5, 0.0, 1.0, 0.0, 0.0), (0.5, 0.0, 1.0, 0.5, 0.0),\n (0.75, 0.25, 0.75, 0.25, 0.25), (1.0, 0.0, 1.0, 1.0, 0.5),\n (1.0, 0.0, 0.5, 1.0, 0.5), (1.0, 0.0, 0.5, 1.0, 0.0),\n (0.5, 0.0, 1.0, 1.0, 0.5), (0.5, 0.0, 1.0, 1.0, 0.0),\n (0.5, 0.0, 0.5, 1.0, 0.5), (0.5, 0.0, 0.5, 1.0, 0.0),\n (0.75, 0.25, 0.75, 0.75, 0.25), (1.0, 0.0, 1.0, 0.5, 1.0),\n (1.0, 0.0, 0.5, 1.0, 1.0), (1.0, 0.0, 0.5, 0.5, 1.0),\n (0.5, 0.0, 1.0, 1.0, 1.0), (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.5, 0.0, 0.5, 1.0, 1.0), (0.5, 0.0, 0.5, 0.5, 1.0),\n (0.75, 0.25, 0.75, 0.75, 0.75), (1.0, 0.0, 0.5, 0.0, 1.0),\n (0.5, 0.0, 1.0, 0.0, 1.0), (0.5, 0.0, 0.5, 0.0, 1.0),\n (0.75, 0.25, 0.75, 0.25, 0.75), (1.0, 0.0, 0.0, 1.0, 0.5),\n (0.5, 0.0, 0.0, 1.0, 0.5), (0.5, 0.0, 0.0, 1.0, 0.0),\n (0.75, 0.25, 0.25, 0.75, 0.25), (1.0, 0.0, 0.0, 0.5, 1.0),\n (0.5, 0.0, 0.0, 1.0, 1.0), (0.5, 0.0, 0.0, 0.5, 1.0),\n (0.75, 0.25, 0.25, 0.75, 0.75), (0.5, 0.0, 0.0, 0.0, 1.0),\n (0.75, 0.25, 0.25, 0.25, 0.75), (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.5), (0.0, 1.0, 0.0, 0.0, 0.5),\n (0.0, 1.0, 0.0, 0.5, 0.0), (0.0, 1.0, 0.5, 0.0, 0.5),\n (0.0, 1.0, 0.5, 0.0, 0.0), (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.25, 0.75, 0.25, 0.25, 0.25), (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.0, 0.5), (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5), (0.0, 0.5, 1.0, 0.0, 0.5),\n (0.0, 0.5, 1.0, 0.0, 0.0), (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.25, 0.75, 0.75, 0.25, 0.25), (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5), (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5), (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.5), (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.25, 0.75, 0.75, 0.75, 0.25), (0.0, 1.0, 1.0, 0.5, 1.0),\n (0.0, 1.0, 0.5, 1.0, 1.0), (0.0, 1.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 1.0, 1.0, 1.0), (0.0, 0.5, 1.0, 0.5, 1.0),\n (0.0, 0.5, 0.5, 1.0, 1.0), (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.25, 0.75, 0.75, 0.75, 0.75), (0.0, 1.0, 0.5, 0.0, 1.0),\n (0.0, 0.5, 1.0, 0.0, 1.0), (0.0, 0.5, 0.5, 0.0, 1.0),\n (0.25, 0.75, 0.75, 0.25, 0.75), (0.0, 1.0, 0.0, 1.0, 0.5),\n (0.0, 0.5, 0.0, 1.0, 0.5), (0.0, 0.5, 0.0, 1.0, 0.0),\n (0.25, 0.75, 0.25, 0.75, 0.25), (0.0, 1.0, 0.0, 0.5, 1.0),\n (0.0, 0.5, 0.0, 1.0, 1.0), (0.0, 0.5, 0.0, 0.5, 1.0),\n (0.25, 0.75, 0.25, 0.75, 0.75), (0.0, 0.5, 0.0, 0.0, 1.0),\n (0.25, 0.75, 0.25, 0.25, 0.75), (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.0, 0.5), (0.0, 0.0, 1.0, 0.5, 0.0),\n (0.25, 0.25, 
0.75, 0.25, 0.25), (0.0, 0.0, 1.0, 1.0, 0.5),\n (0.0, 0.0, 0.5, 1.0, 0.5), (0.0, 0.0, 0.5, 1.0, 0.0),\n (0.25, 0.25, 0.75, 0.75, 0.25), (0.0, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.0, 0.5, 1.0, 1.0), (0.0, 0.0, 0.5, 0.5, 1.0),\n (0.25, 0.25, 0.75, 0.75, 0.75), (0.0, 0.0, 0.5, 0.0, 1.0),\n (0.25, 0.25, 0.75, 0.25, 0.75), (0.0, 0.0, 0.0, 1.0, 0.5),\n (0.25, 0.25, 0.25, 0.75, 0.25), (0.0, 0.0, 0.0, 0.5, 1.0),\n (0.25, 0.25, 0.25, 0.75, 0.75), (0.25, 0.25, 0.25, 0.25, 0.75)]\n\n nn_checks = {(1, 1, 1, 1, 1): [(1.0, 1.0, 1.0, 0.5, 1.0),\n (1.0, 1.0, 0.5, 1.0, 1.0),\n (1.0, 0.5, 0.5, 0.5, 0.5),\n (1.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 1.0),\n (1.0, 0.5, 0.5, 1.0, 0.5),\n (1.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 1.0, 0.5, 0.5),\n (1.0, 1.0, 0.5, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 0.5),\n (1.0, 1.0, 1.0, 1.0, 0.5),\n (1.0, 1.0, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 0.5, 1.0),\n (1.0, 0.5, 0.5, 1.0, 1.0),\n (0.5, 1.0, 0.5, 0.5, 1.0),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (1.0, 0.5, 0.5, 0.5, 1.0),\n (1.0, 0.5, 1.0, 1.0, 1.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0.75, 0.75, 0.75, 0.75, 0.75),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (0.5, 1.0, 1.0, 1.0, 1.0),\n (0.5, 1.0, 1.0, 0.5, 1.0),\n (0.5, 1.0, 0.5, 1.0, 1.0)],\n (0.25, 0.75, 0.75, 0.75, 0.25): [(0.5, 1.0, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.5),\n (0, 1, 1, 1, 0),\n (0.5, 1.0, 0.5, 0.5, 0.5),\n (0.5, 1.0, 1.0, 1.0, 0.5),\n (0.0, 1.0, 0.5, 0.5, 0.5),\n (0.0, 1.0, 1.0, 1.0, 0.5),\n (0.5, 1.0, 0.5, 1.0, 0.5),\n (0.0, 1.0, 0.5, 1.0, 0.5),\n (0.5, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.5),\n (0.0, 1.0, 1.0, 0.5, 0.0),\n (0.0, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 1.0, 0.5, 0.0),\n (0.5, 1.0, 0.5, 1.0, 0.0),\n (0.5, 1.0, 0.5, 0.5, 0.0),\n (0.0, 1.0, 0.5, 0.5, 0.0),\n (0.5, 0.5, 1.0, 0.5, 0.0),\n (0.5, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.5, 0.5, 1.0, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 0.5, 1.0, 0.0),\n (0.0, 0.5, 0.5, 0.5, 0.0),\n (0.0, 0.5, 1.0, 1.0, 0.0),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.5, 0.5, 1.0, 1.0, 0.5),\n (\n 0.5, 0.5, 1.0, 0.5, 0.5)],\n (0.0, 0.0, 1.0, 0.5, 1.0): [(0.5, 0.0, 0.5, 0.5, 1.0),\n (0.0, 0.5, 0.5, 0.5, 1.0),\n (0.5, 0.5, 0.5, 0.5, 1.0),\n (0.0, 0.0, 0.5, 0.5, 1.0),\n (0, 0, 1, 1, 1),\n (0.5, 0.5, 1.0, 0.5, 1.0),\n (0.5, 0.0, 1.0, 0.5, 1.0),\n (0.0, 0.5, 1.0, 0.5, 1.0),\n (0, 0, 1, 0, 1),\n (0.5, 0.0, 1.0, 0.5, 0.5),\n (0.0, 0.5, 1.0, 0.5, 0.5),\n (0.5, 0.5, 1.0, 0.5, 0.5),\n (0.0, 0.0, 1.0, 0.5, 0.5),\n (0.5, 0.5, 0.5, 0.5, 0.5),\n (0.0, 0.0, 0.5, 0.5, 0.5),\n (0.25, 0.25, 0.75, 0.75, 0.75),\n (0.5, 0.0, 0.5, 0.5, 0.5),\n (0.0, 0.5, 0.5, 0.5, 0.5), (\n 0.25, 0.25, 0.75, 0.25, 0.75)]}\n\n init_triangulation(5, 1, check, nn_checks)",
"def make_state_appliable_4ch(state):\n size = len(state)\n st_appl = np.zeros((size,)*4, dtype=complex)\n for p1 in range(size):\n for p2 in range(size):\n for p3 in range(size):\n for p4 in range(size):\n st_appl[p1, p2, p3, p4] = state[p1, p2, p3, p4] * sqrt(factorial(p1) * factorial(p2) * factorial(p3) * factorial(p4))\n return st_appl",
"def refugia_adj_5_full_2_iter5 (params, ns):\n #33 parameters \n nu1x, nuA, nu1a, nu2a, nu3a, nu1b, nu2b, nu3b, nu1c, nu2c, nu3c, nu1d, nu2d, nu3d, m0_12, m0_21, m1_12, m1_13, m1_21, m1_23, m1_31, m1_32, m3_12, m3_13, m3_21, m3_23, m3_31, m3_32, T0, T1, T2, T3, T4 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2])\n ## Population function and migration matrix for T0 (initial split; the definition of this time epoch differentiates this model from refugia_adj_5_simsplit_4epochs)\n nu_T0 = [nu1x, nuA]\n mig0 = numpy.array([[0, m0_12],[m0_21, 0]])\n fs.integrate(nu_T0, T0, m=mig0)\n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function and migration matrix for T1 (to reflect sum effect of all previous glacial-interglacial cycles)\n nu_T1 = [nu1a, nu2a, nu3a]\n mig1 = numpy.array([[0, m1_12, m1_13],[m1_21, 0, m1_23], [m1_31, m1_32, 0]]) \n fs.integrate(nu_T1, T1, m=mig1)\n ## Population function and migration matrix for T2 (to reflect period of isolation during last glacial)\n nu_T2 = [nu1b, nu2b, nu3b]\n fs.integrate(nu_T2, T2)\n ## Population function and migration matrix for T3 (to reflect inter-glacial expansion)\n nu_T3 = [nu1c, nu2c, nu3c]\n mig3 = numpy.array([[0, m3_12, m3_13],[m3_21, 0, m3_23], [m3_31, m3_32, 0]]) \n fs.integrate(nu_T3, T3, m=mig3)\n ## Population function and migration matrix for T3 (bottleneck to capture single population representation of lineage)\n nu_T4 = [nu1d, nu2d, nu3d]\n fs.integrate(nu_T4, T4) \n return fs",
"def ramp5n(params, phase, args=dict(n=5, guess=[1., 20, 83, 8.1, -0.1])): #-0.16, 4.4, -0.16, 0.43])):\n # 2013-12-07 14:08 IJMC: Created.\n\n return params[0] * (1. - np.exp(-params[1]*phase + params[2]) - \\\n np.exp(-params[3]*phase + params[4]))",
"def fkine_ur5(q):\n \n \n T1 = dh(0.08916, +q[0], 0.0, +pi/2)\n T2 = dh( 0.0, +q[1], -0.425, 0.0)\n T3 = dh( 0.0, +q[2], -0.392, 0.0)\n T4 = dh(0.10915, +q[3], 0.0, +pi/2)\n T5 = dh(0.09465, +pi+q[4], 0.0, +pi/2)\n T6 = dh( 0.0823, +pi+q[5], 0.0, 0.0)\n \n # Efector final con respecto a la base\n T = np.dot(np.dot(np.dot(np.dot(np.dot(T1,T2),T3),T4),T5),T6)\n return T",
"def phosphorene_4band():\n a = 0.222\n ax = 0.438\n ay = 0.332\n theta = 96.79 * (pi / 180)\n phi = 103.69 * (pi / 180)\n\n lat = pb.Lattice(a1=[ax, 0], a2=[0, ay])\n\n h = a * sin(phi - pi / 2)\n s = 0.5 * ax - a * cos(theta / 2)\n lat.add_sublattices(\n ('A', [-s/2, -ay/2, h], 0),\n ('B', [ s/2, -ay/2, 0], 0),\n ('C', [-s/2 + ax/2, 0, 0], 0),\n ('D', [ s/2 + ax/2, 0, h], 0)\n )\n\n lat.register_hopping_energies({\n 't1': -1.22,\n 't2': 3.665,\n 't3': -0.205,\n 't4': -0.105,\n 't5': -0.055\n })\n\n lat.add_hoppings(\n # t1\n ([-1, 0], 'A', 'D', 't1'),\n ([-1, -1], 'A', 'D', 't1'),\n ([ 0, 0], 'B', 'C', 't1'),\n ([ 0, -1], 'B', 'C', 't1'),\n # t2\n ([ 0, 0], 'A', 'B', 't2'),\n ([ 0, 0], 'C', 'D', 't2'),\n # t3\n ([ 0, 0], 'A', 'D', 't3'),\n ([ 0, -1], 'A', 'D', 't3'),\n ([ 1, 1], 'C', 'B', 't3'),\n ([ 1, 0], 'C', 'B', 't3'),\n # t4\n ([ 0, 0], 'A', 'C', 't4'),\n ([ 0, -1], 'A', 'C', 't4'),\n ([-1, 0], 'A', 'C', 't4'),\n ([-1, -1], 'A', 'C', 't4'),\n ([ 0, 0], 'B', 'D', 't4'),\n ([ 0, -1], 'B', 'D', 't4'),\n ([-1, 0], 'B', 'D', 't4'),\n ([-1, -1], 'B', 'D', 't4'),\n # t5\n ([-1, 0], 'A', 'B', 't5'),\n ([-1, 0], 'C', 'D', 't5')\n )\n\n return lat",
"def test_4_1_5D_cube_init(self):\n check = [(0, 0, 0, 0, 0), (1, 1, 1, 1, 1), (1, 0, 0, 0, 0),\n (1, 1, 0, 0, 0),\n (1, 1, 1, 0, 0), (1, 1, 1, 1, 0), (1, 1, 1, 0, 1),\n (1, 1, 0, 1, 0),\n (1, 1, 0, 1, 1), (1, 1, 0, 0, 1), (1, 0, 1, 0, 0),\n (1, 0, 1, 1, 0),\n (1, 0, 1, 1, 1), (1, 0, 1, 0, 1), (1, 0, 0, 1, 0),\n (1, 0, 0, 1, 1),\n (1, 0, 0, 0, 1), (0, 1, 0, 0, 0), (0, 1, 1, 0, 0),\n (0, 1, 1, 1, 0),\n (0, 1, 1, 1, 1), (0, 1, 1, 0, 1), (0, 1, 0, 1, 0),\n (0, 1, 0, 1, 1),\n (0, 1, 0, 0, 1), (0, 0, 1, 0, 0), (0, 0, 1, 1, 0),\n (0, 0, 1, 1, 1),\n (0, 0, 1, 0, 1), (0, 0, 0, 1, 0), (0, 0, 0, 1, 1),\n (0, 0, 0, 0, 1),\n (0.5, 0.5, 0.5, 0.5, 0.5)]\n\n nn_checks = {(0, 1, 0, 1, 1): [(0, 0, 0, 0, 0), (\n 0.5, 0.5, 0.5, 0.5, 0.5), (0, 0, 0, 1, 1), (1, 1, 0, 1, 1),\n (0, 1, 0, 0, 0),\n (0, 1, 0, 0, 1),\n (0, 1, 0, 1, 0),\n (0, 0, 0, 0, 1),\n (1, 1, 1, 1, 1),\n (0, 1, 1, 1, 1),\n (0, 0, 0, 1, 0)]}\n\n init_triangulation(5, 0, check, nn_checks)",
"def ramp4n(params, phase, args=dict(n=5, guess=[1, -3.7e-4, -0.94, 0.087, -1.08])):\n # 2013-12-07 14:08 IJMC: Created.\n\n return params[0] * (1. - np.exp(-params[1]*phase + params[2]) + \\\n params[3] * (phase - 0.5) + \\\n params[4] * (phase - 0.5)**2)",
"def step5(self):\n\t\tself.j = self.k\n\t\tif self.b[self.k] == 'e':\n\t\t\ta = self.m()\n\t\t\tif a > 1 or (a == 1 and not self.cvc(self.k-1)):\n\t\t\t\tself.k = self.k - 1\n\t\tif self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1:\n\t\t\tself.k = self.k -1",
"def refugia_adj_5_full_2_iter4 (params, ns):\n #33 parameters \n nu1x, nuA, nu1a, nu2a, nu3a, nu1b, nu2b, nu3b, nu1c, nu2c, nu3c, nu1d, nu2d, nu3d, m0_12, m0_21, m1_12, m1_13, m1_21, m1_23, m1_31, m1_32, m3_12, m3_13, m3_21, m3_23, m3_31, m3_32, T0, T1, T2, T3, T4 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2])\n ## Population function and migration matrix for T0 (initial split; the definition of this time epoch differentiates this model from refugia_adj_5_simsplit_4epochs)\n nu_T0 = [nu1x, nuA]\n mig0 = numpy.array([[0, m0_12],[m0_21, 0]])\n fs.integrate(nu_T0, T0, m=mig0)\n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function and migration matrix for T1 (to reflect sum effect of all previous glacial-interglacial cycles)\n nu_T1 = [nu1a, nu2a, nu3a]\n mig1 = numpy.array([[0, m1_12, m1_13],[m1_21, 0, m1_23], [m1_31, m1_32, 0]]) \n fs.integrate(nu_T1, T1, m=mig1)\n ## Population function and migration matrix for T2 (to reflect period of isolation during last glacial)\n nu_T2 = [nu1b, nu2b, nu3b]\n fs.integrate(nu_T2, T2)\n ## Population function and migration matrix for T3 (to reflect inter-glacial expansion)\n nu_T3 = [nu1c, nu2c, nu3c]\n mig3 = numpy.array([[0, m3_12, m3_13],[m3_21, 0, m3_23], [m3_31, m3_32, 0]]) \n fs.integrate(nu_T3, T3, m=mig3)\n ## Population function and migration matrix for T3 (bottleneck to capture single population representation of lineage)\n nu_T4 = [nu1d, nu2d, nu3d]\n fs.integrate(nu_T4, T4) \n return fs",
"def calculate_vn6_over_vn4(vn_data_array, outputFileName):\n vn_data_array = array(vn_data_array)\n nev = len(vn_data_array[:, 0])\n dN = real(vn_data_array[:, 0])\n Q1 = dN*vn_data_array[:, 1]\n Q2 = dN*vn_data_array[:, 2]\n Q3 = dN*vn_data_array[:, 3]\n Q4 = dN*vn_data_array[:, 4]\n Q5 = dN*vn_data_array[:, 5]\n Q6 = dN*vn_data_array[:, 6]\n\n # two-particle correlation\n N2_weight = dN*(dN - 1.)\n Q2_2 = abs(Q2)**2. - dN\n\n # four-particle correlation\n N4_weight = dN*(dN - 1.)*(dN - 2.)*(dN - 3.)\n Q2_4 = ((abs(Q2)**4.) - 2.*real(Q4*conj(Q2)*conj(Q2))\n - 4.*(dN - 2.)*(abs(Q2)**2.) + abs(Q4)**2.\n + 2*dN*(dN - 3.))\n\n # six-particle correlation\n N6_weight = dN*(dN - 1.)*(dN - 2.)*(dN - 3.)*(dN - 4.)*(dN - 5.)\n Q2_6 = (abs(Q2)**6. + 9*(abs(Q4)**2.)*(abs(Q2)**2.)\n - 6.*real(Q4*Q2*conj(Q2)*conj(Q2)*conj(Q2))\n + 4.*real(Q6*conj(Q2)*conj(Q2)*conj(Q2))\n - 12.*real(Q6*conj(Q4)*conj(Q2))\n + 18.*(dN - 4.)*real(Q4*conj(Q2)*conj(Q2))\n + 4.*(abs(Q6)**2.)\n - 9.*(dN - 4.)*((abs(Q2)**4.) + (abs(Q4)**2.))\n + 18.*(dN - 5.)*(dN - 2.)*(abs(Q2)**2.)\n - 6.*dN*(dN - 4.)*(dN - 5.))\n\n # calcualte observables with Jackknife resampling method\n r2_array = zeros(nev)\n gamma1_array = zeros(nev)\n for iev in range(nev):\n array_idx = [True]*nev\n array_idx[iev] = False\n array_idx = array(array_idx)\n\n # C_n{4}\n C_2_2 = mean(Q2_2[array_idx])/mean(N2_weight[array_idx])\n C_2_4 = (mean(Q2_4[array_idx])/mean(N4_weight[array_idx])\n - 2.*(C_2_2**2.))\n C_2_6 = (mean(Q2_6[array_idx])/mean(N6_weight[array_idx])\n - 9.*C_2_2*mean(Q2_4[array_idx])/mean(N4_weight[array_idx])\n + 12.*(C_2_2**3.))\n if C_2_6 > 0. and C_2_4 < 0. and C_2_2 > 0.:\n v2_2 = sqrt(C_2_2)\n v2_6 = (C_2_6/4.)**(1./6.)\n v2_4 = (-C_2_4)**(1./4.)\n r2_array[iev] = v2_6/v2_4\n gamma1_array[iev] = (-6.*sqrt(2)*(v2_4**2.)*(v2_4 - v2_6)\n /(v2_2**2. - v2_4**2.)**(1.5))\n\n r2_mean = mean(r2_array)\n r2_err = sqrt((nev - 1.)/nev*sum((r2_array - r2_mean)**2.))\n gamma1_mean = mean(gamma1_array)\n gamma1_err = sqrt((nev - 1.)/nev*sum((gamma1_array - gamma1_mean)**2.))\n\n f = open(outputFileName, 'w')\n f.write(\n \"# n vn{6}/vn{4} (vn{6}/vn{4})_err gamma_1 gamma_1_err\\n\")\n f.write(\"%d %.10e %.10e %.10e %.10e\\n\"\n % (2, r2_mean, r2_err, gamma1_mean, gamma1_err))\n f.close()\n return",
"def refugia_adj_5_simsplit_4epochs_iter5 (params, ns):\n #28 parameters \n nu1a, nu2a, nu3a, nu1b, nu2b, nu3b, nu1c, nu2c, nu3c, nu1d, nu2d, nu3d, m1_12, m1_13, m1_21, m1_23, m1_31, m1_32, m3_12, m3_13, m3_21, m3_23, m3_31, m3_32, T1, T2, T3, T4 = params\n sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1] + ns[2])\n fs = moments.Spectrum(sts)\n fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1] + ns[2]) \n fs = moments.Manips.split_2D_to_3D_2(fs, ns[1], ns[2])\n ## Population function and migration matrix for T1 (to reflect sum effect of all previous glacial-interglacial cycles)\n nu_T1 = [nu1a, nu2a, nu3a]\n mig1 = numpy.array([[0, m1_12, m1_13],[m1_21, 0, m1_23], [m1_31, m1_32, 0]]) \n fs.integrate(nu_T1, T1, m=mig1)\n ## Population function and migration matrix for T2 (to reflect period of isolation during last glacial)\n nu_T2 = [nu1b, nu2b, nu3b]\n fs.integrate(nu_T2, T2)\n ## Population function and migration matrix for T3 (to reflect inter-glacial expansion)\n nu_T3 = [nu1c, nu2c, nu3c]\n mig3 = numpy.array([[0, m3_12, m3_13],[m3_21, 0, m3_23], [m3_31, m3_32, 0]]) \n fs.integrate(nu_T3, T3, m=mig3)\n ## Population function and migration matrix for T3 (bottleneck to capture single population representation of lineage)\n nu_T4 = [nu1d, nu2d, nu3d]\n fs.integrate(nu_T4, T4) \n return fs",
"def test_dphase(self):\n model = BDF(debug=False)\n node1, c1, t1 = 100, 3, 0.3\n node2, c2, t2 = 101, 4, 0.4\n sid = 42\n card_lines = ['DPHASE', sid, node1, c1, t1, node2, c2, t2]\n model.add_card(card_lines, card_lines[0], comment='', is_list=True,\n has_none=True)\n model.add_grid(100, [0., 0., 0.])\n model.add_grid(101, [0., 0., 0.])\n model.validate()\n model.cross_reference()\n #print(model.dphases[42])\n save_load_deck(model)",
"def smooth5(size: int) -> int:\n if size < 6:\n return size\n if not size % 2:\n return size\n\n new = np.inf\n power5 = 1\n while power5 < size:\n power35 = power5\n while power35 < size:\n power2 = 2 ** ((-int(-size // power35) - 1).bit_length())\n n = power2 * power35\n if n == size:\n return new\n elif n < new:\n new = n\n power35 *= 3\n if power35 == size:\n return new\n if power35 < new:\n new = power35\n power5 *= 5\n if power5 == size:\n return new\n if power5 < new:\n new = power5\n return new",
"def ex_ridme5(param): \r\n param = _parsargs(param, npar=6) \r\n\r\n # Dipolar pathways\r\n lam = param.copy()\r\n pathways = [[] for _ in lam]\r\n pathways[0] = [lam[0]]\r\n pathways[1] = [lam[1], 0, 1]\r\n pathways[2] = [lam[2], 0, 2]\r\n pathways[3] = [lam[3], 0, 3]\r\n pathways[4] = [lam[4], 0, 4]\r\n pathways[5] = [lam[5], 0, 5]\r\n return pathways",
"def prob4(d = 500): \n #import the plane data\n planeData = np.load(\"plane.npy\")\n \n tplane = planeData[:,0]\n alpha = np.deg2rad(planeData[:,1])\n beta = np.deg2rad(planeData[:,2])\n \n l = len(tplane)\n \n #define x and y functions\n def x(n):\n# Gives x position\n return d * np.tan(beta[n]) / (np.tan(beta[n]) - np.tan(alpha[n]))\n def y(n):\n# Gives y position\n return d * np.tan(beta[n]) * np.tan(alpha[n]) / (np.tan(beta[n]) - np.tan(alpha[n]))\n \n #define x and y prime as we will see them\n def xprime(n):\n# Gives the approximate derivative of x\n if n == 0:\n return fdq1(x, n, h = 1)\n elif n == l-1:\n return bdq1(x, n, h = 1)\n elif n > 0 and n < l:\n return cdq2(x, n, h = 1)\n else:\n return 0\n \n def yprime(n):\n# Gives the approximate derivative of y\n if n == 0:\n return fdq1(y, n, h = 1)\n elif n == l-1:\n return bdq1(y, n, h = 1)\n elif n > 0 and n < l:\n return cdq2(y, n, h = 1)\n else:\n return 0\n \n #define speed from x and y prime\n def speed(n):\n# print(\"speed(n) where n = \" + str(n))\n return np.sqrt((xprime(n))**2 + (yprime(n))**2)\n \n #Finally get the speed from the information we have\n spd = []\n X = []\n Y = []\n for i in range(0, l):\n spd.append(speed(i))\n X.append(x(i))\n Y.append(y(i))\n \n return spd\n \n raise NotImplementedError(\"Problem 4 Incomplete\")",
"def ramp4p(params, phase, args=dict(n=5, guess=[1, -0.068, 2.33, 0.933, -20.5])):\n # 2013-12-07 14:08 IJMC: Created.\n\n return params[0] * (1. + np.exp(-params[1]*phase + params[2]) + \\\n params[3] * (phase - 0.5) + \\\n params[4] * (phase - 0.5)**2)",
"def eo_edges(self):\n logger.info(\"eo_edges called\")\n permutations = []\n original_state = self.state[:]\n original_solution = self.solution[:]\n tmp_solution_len = len(self.solution)\n\n # Build a list of the wing strings at each midge\n wing_strs = []\n\n for _, square_index, partner_index in midges_recolor_tuples_555:\n square_value = self.state[square_index]\n partner_value = self.state[partner_index]\n wing_str = square_value + partner_value\n wing_str = wing_str_map[square_value + partner_value]\n wing_strs.append(wing_str)\n\n # build a list of all possible EO permutations...an even number of edges must be high\n for num in range(4096):\n num = str(bin(num)).lstrip(\"0b\").zfill(12)\n if num.count(\"1\") % 2 == 0:\n permutations.append(list(map(int, num)))\n\n # Put all 2048 starting states in a file and point ida-via-graph\n # at the file so it can solve all of them and apply the one that is the shortest.\n lr_center_stage_states = []\n eo_outer_orbit_states = []\n eo_inner_orbit_states = []\n\n for permutation in permutations:\n must_be_uppercase = []\n must_be_lowercase = []\n self.state = original_state[:]\n\n for wing_str, uppercase in zip(wing_strs, permutation):\n if uppercase:\n must_be_uppercase.append(wing_str)\n else:\n must_be_lowercase.append(wing_str)\n\n # logger.info(\"%s: %s permutation %s\" % (self, index, \"\".join(map(str, permutation))))\n self.edges_flip_orientation(must_be_uppercase, must_be_lowercase)\n\n # build lists of the states that we need to find state_indexes for\n lr_center_stage_states.append(self.lt_phase3_lr_center_stage.state())\n eo_outer_orbit_states.append(self.lt_phase3_eo_outer_orbit.state())\n eo_inner_orbit_states.append(self.lt_phase3_eo_inner_orbit.state())\n\n # now we have a huge list of states to lookup, do a binary search on multiple states at once (this is drastically faster\n # than binary searching for them individually). state_index_multiple() will return a dict where the state is the key\n # and the state_index is the value.\n lr_center_stage_eo_inner_orbit_state_indexes = self.lt_phase3_lr_center_stage.state_index_multiple(\n lr_center_stage_states\n )\n eo_outer_orbit_state_indexes = self.lt_phase3_eo_outer_orbit.state_index_multiple(eo_outer_orbit_states)\n eo_inner_orbit_state_indexes = self.lt_phase3_eo_inner_orbit.state_index_multiple(eo_inner_orbit_states)\n\n # build a list of tuples of the state indexes\n pt_state_indexes = []\n for lr_center_stage_eo_inner_orbit_state, eo_outer_orbit_state, eo_inner_orbit_state in zip(\n lr_center_stage_states, eo_outer_orbit_states, eo_inner_orbit_states\n ):\n pt_state_indexes.append(\n (\n lr_center_stage_eo_inner_orbit_state_indexes[lr_center_stage_eo_inner_orbit_state],\n eo_outer_orbit_state_indexes[eo_outer_orbit_state],\n eo_inner_orbit_state_indexes[eo_inner_orbit_state],\n )\n )\n\n self.state = original_state[:]\n self.solution = original_solution[:]\n\n # When solve_via_c is passed pt_state_indexes (2048 lines of states in this case), it will try all 2048 of them\n # to find the state that has the shortest solution.\n self.lt_phase3.solve_via_c(pt_states=pt_state_indexes)\n\n self.print_cube_add_comment(\"edges EOed into high/low groups\", tmp_solution_len)\n self.post_eo_state = self.state[:]\n self.post_eo_solution = self.solution[:]\n\n # re-color the cube so that the edges are oriented correctly so we can\n # pair 4-edges then 8-edges. 
After all edge pairing is done we will uncolor\n # the cube and re-apply the solution.\n self.edges_flip_orientation(wing_strs, [])\n self.highlow_edges_print()",
"def makeTAPE5(self):\n\n wn1, wn2 = self.wnLims\n\n # loop through each HITRAN molecule and create an associated TAPE5\n allT5 = []\n for iMol, mol in enumerate(self.mols):\n base = os.path.basename(mol)\n print(base)\n tape5 = 'TAPE5_%s' % base\n\n # LNFL TAPE5 records \n # (see lnfl_instructions document in LNFL release)\n rec1 = '$ %s' % base\n rec2 = '%10.3f%10.3f' % (wn1-25, wn2+25)\n\n # start off with all molecules off, then turn iMol on, then \n # generate a single string instead of a list of characters\n # and append \n rec3 = ['0'] * self.nMols\n rec3[iMol] = '1'\n rec3 = ''.join(rec3) + ' NBLK1 NOCPL LNOUT '\n end = '%%%%%'\n\n outDat = [rec1, rec2]\n\n # line coupling molecules\n if base in ['02_CO2', '06_CH4', '07_O2']:\n rec3 = rec3.replace('NOCPL', 'MRG2')\n rec4 = [' '] * self.nMols\n rec4[iMol] = '1'\n rec4 = ''.join(rec4)\n outDat += [rec3, rec4]\n else:\n outDat.append(rec3)\n # endif coupling\n\n outDat.append(end)\n\n # now write TAPE5\n outFP = open(tape5, 'w')\n for line in outDat: outFP.write('%s\\n' % line)\n outFP.close()\n\n # copy TAPE5 to subdirectory for molecule in buildDir\n target = '%s/%s' % (self.dirT5, tape5)\n if os.path.exists(target):\n print('WARNING: overwriting %s' % target)\n # endif target check\n os.rename(tape5, target)\n\n allT5.append(target)\n # end molecule loop\n\n self.allT5 = list(allT5)\n return self",
"def processPhaseHeight(self, phasesInRing1, phasesInRing2):\n P11, P12, P21, P22 = ([] for i in range(4))\n phaseHeightDictionary = {}\n\n [P11.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index < 2]\n [P12.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index >= 2 and index < 4]\n [P21.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index >= 4 and index < 6]\n [P22.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index >= 6 and index < 8]\n\n if (len(P11) == len(P21)):\n for index in range(len(P11)):\n if len(P11) > 0:\n phaseHeightDictionary[str(P11[index])] = 10\n\n for index in range(len(P21)):\n if len(P21) > 0:\n phaseHeightDictionary[str(P21[index])] = 10\n\n elif (len(P11) < len(P21)):\n for index in range(len(P11)):\n if len(P11) > 0:\n phaseHeightDictionary[str(P11[index])] = 20\n\n for index in range(len(P21)):\n if len(P21) > 0:\n phaseHeightDictionary[str(P21[index])] = 10\n\n elif (len(P11) > len(P21)):\n for index in range(len(P11)):\n if len(P11) > 0:\n phaseHeightDictionary[str(P11[index])] = 10\n\n for index in range(len(P21)):\n if len(P21) > 0:\n phaseHeightDictionary[str(P21[index])] = 20\n\n if (len(P12) == len(P22)):\n for index in range(len(P12)):\n if len(P12) > 0:\n phaseHeightDictionary[str(P12[index])] = 10\n\n for index in range(len(P22)):\n if len(P22) > 0:\n phaseHeightDictionary[str(P22[index])] = 10\n\n elif (len(P12) < len(P22)):\n for index in range(len(P12)):\n if len(P12) > 0:\n phaseHeightDictionary[str(P12[index])] = 20\n for index in range(len(P22)):\n if len(P22) > 0:\n phaseHeightDictionary[str(P22[index])] = 10\n\n elif (len(P12) > len(P22)):\n for index in range(len(P12)):\n if len(P12) > 0:\n phaseHeightDictionary[str(P12[index])] = 10\n for index in range(len(P22)):\n if len(P22) > 0:\n phaseHeightDictionary[str(P22[index])] = 20\n\n for phase in phasesInRing1:\n for key, value in phaseHeightDictionary.items():\n if int(key) == phase:\n self.phaseHeightInRing1.append(value)\n\n for phase in phasesInRing2:\n for key, value in phaseHeightDictionary.items():\n if int(key) == phase:\n self.phaseHeightInRing2.append(value)",
"def step4(self):\n\t\tif self.b[self.k - 1] == 'a':\n\t\t\tif self.ends(\"al\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'c':\n\t\t\tif self.ends(\"ance\"): pass\n\t\t\telif self.ends(\"ence\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'e':\n\t\t\tif self.ends(\"er\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'i':\n\t\t\tif self.ends(\"ic\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'l':\n\t\t\tif self.ends(\"able\"): pass\n\t\t\telif self.ends(\"ible\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'n':\n\t\t\tif self.ends(\"ant\"): pass\n\t\t\telif self.ends(\"ement\"): pass\n\t\t\telif self.ends(\"ment\"): pass\n\t\t\telif self.ends(\"ent\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'o':\n\t\t\tif self.ends(\"ion\") and (self.b[self.j] == 's' or self.b[self.j] == 't'): pass\n\t\t\telif self.ends(\"ou\"): pass\n\t\t\t# takes care of -ous\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 's':\n\t\t\tif self.ends(\"ism\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 't':\n\t\t\tif self.ends(\"ate\"): pass\n\t\t\telif self.ends(\"iti\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'u':\n\t\t\tif self.ends(\"ous\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'v':\n\t\t\tif self.ends(\"ive\"): pass\n\t\t\telse: return\n\t\telif self.b[self.k - 1] == 'z':\n\t\t\tif self.ends(\"ize\"): pass\n\t\t\telse: return\n\t\telse:\n\t\t\treturn\n\t\tif self.m() > 1:\n\t\t\tself.k = self.j",
"def rk5(accel,m,r,h,v):\n k1v = accel(m,r)\n k1r = v\n k2v = accel(m,r + 0.25*k1r*h)\n k2r = v + (0.25*k1v)*h\n k3v = accel(m,r + (3/32.*k1r + 9/32.*k2r)*h)\n k3r = v + (3/32.*k1v + 9/32.*k2v)*h\n k4v = accel(m,r + (1932/2197.*k1r - 7200/2197.*k2r + 7296/2197.*k3r)*h)\n k4r = v + (1932/2197.*k1v - 7200/2197.*k2v + 7296/2197.*k3v)*h\n k5v = accel(m,r + (439/216.*k1r - 8*k2r + 3680/513.*k3r - 845/4104.*k4r)*h)\n k5r = v + (439/216.*k1v - 8*k2v + 3680/513.*k3v - 845/4104.*k4v)*h\n k6v = accel(m,r - (8/27.*k1r + 2*k2r - 3544/2565.*k3r + 1859/4104.*k4r - 11/40.*k5r)*h)\n k6r = v - (8/27.*k1v + 2*k2v - 3544/2565.*k3v + 1859/4104.*k4v - 11/40.*k5v)*h\n\n # 5th order calculation\n new_v5 = v + h*(16/135.*k1v + 6656/12825.*k3v+28561/56430.*k4v - 9/50.*k5v + 2/55.*k6v) \n new_r5 = r + h*(16/135.*k1r + 6656/12825.*k3r+28561/56430.*k4r - 9/50.*k5r + 2/55.*k6r) \n \n return new_v5, new_r5",
"def ikine_pose_ur5(xdes, dxdes, ddxdes, q0): \n k_p = 550;\n k_o = 150;\n k = np.diag([k_p, k_p, k_p, k_o, k_o, k_o, k_o])\n best_norm_e1 = 0.01\n best_norm_e2 = 0.01\n max_iter = 20\n delta = 0.001\n dq_p\t\t\t= np.zeros(6)\n\n q = copy(q0)\n for i in range(max_iter):\n T = fkine_ur5(q)\n e1 = xdes[0:3] - T[0:3,3]\n e2 = quatError(xdes[3:7], rot2quat(T[0:3,0:3]))\n e = np.concatenate((e1,e2), axis=0)\n de = -np.dot(k,e)\n J = jacobian_pose_ur5(q,delta)\n Jinv = np.linalg.pinv(J)\n dq = np.dot(Jinv, dxdes - de )\n q = q + delta*dq\n \n if (np.linalg.norm(e2) < best_norm_e2) & (np.linalg.norm(e1)< best_norm_e1):\n\n best_norm_e2 = np.linalg.norm(e2)\n best_norm_e1 = np.linalg.norm(e1)\n q_best = q\n dq_best = dq\n ddq_best \t\t= \t(dq_best - dq_p)/delta\n #ddq_best = np.dot(Jinv, ( ddxdes - np.dot(dJ,dq_best) ))\n print(\"iter: \", i)\n print(\"norma position: \",best_norm_e1)\n print(\"norma orientation: \",best_norm_e2)\n #print(\"---------\")\n\n \tdq_p \t= dq\n return q_best, dq_best, ddq_best",
"def hash_flow(flow_5_tuple):\n ip_A = flow_5_tuple[0]\n ip_B = flow_5_tuple[1]\n tp_src = flow_5_tuple[2]\n tp_dst = flow_5_tuple[3]\n proto = flow_5_tuple[4]\n if proto == 6:\n #*** Is a TCP flow:\n if ip_A > ip_B:\n direction = 1\n elif ip_B > ip_A:\n direction = 2\n elif tp_src > tp_dst:\n direction = 1\n elif tp_dst > tp_src:\n direction = 2\n else:\n direction = 1\n else:\n #*** Isn't a flow, so arbitrarily set direction as 1:\n direction = 1\n if direction == 1:\n flow_tuple = (ip_A, ip_B, tp_src, tp_dst, proto)\n else:\n #*** Flip direction:\n flow_tuple = (ip_B, ip_A, tp_dst, tp_src, proto)\n return hash_tuple(flow_tuple)",
"def endgame_score_connectfour(board, is_current_player_maximizer) :\n chains=sorted(board.get_all_chains(), key=lambda x: len(x))\n if len(chains[-1])>=4:\n if not is_current_player_maximizer:\n return 1000;\n else:\n return -1000;\n return 0;"
] | [
"0.56203735",
"0.5353156",
"0.5339077",
"0.53312033",
"0.52918285",
"0.52905166",
"0.5248838",
"0.52097756",
"0.5169619",
"0.51692957",
"0.51207316",
"0.50766826",
"0.5064046",
"0.5053413",
"0.50450885",
"0.50349313",
"0.503351",
"0.49939486",
"0.49711737",
"0.49670827",
"0.49572968",
"0.49537754",
"0.4938196",
"0.4930792",
"0.48999292",
"0.48927122",
"0.48653322",
"0.48583114",
"0.4843019",
"0.4833436"
] | 0.58114266 | 0 |
phase1 stages the centers on sides L and R phase2 stages the centers on sides F and B and put the LR centers in one of 495 states that can be solved without L L' R R'...this is prep work for phase 3 TODO this needs more work BLBFRUFRDDFBUULBRLBRRLDLDLFURFLUBUDRRRDDFDFBBLUFRUFFBBFBLLLDBDFBDBLFDUUFRFBLDUDDURFDRBBDFUUFUBFBDLULDLRRUDFDFULLLUUBUDRLURLBBDURFRBULBRFRBRDRRULDFLFLR results in "5x5x5 edge swaps are odd, cannot pair edges" | def group_centers_phase1_and_2(self) -> None:
    self.rotate_U_to_U()
    self.rotate_F_to_F()

    if self.centers_staged():
        return

    original_state = self.state[:]
    original_solution = self.solution[:]
    tmp_solution_len = len(self.solution)

    # find multiple phase1 solutions
    phase1_solutions = self.lt_LR_centers_stage.solutions_via_c(solution_count=100)
    pt_state_indexes = []
    pt_state_indexes_LR_centers_special = []
    phase2_pt_state_indexes_to_phase1_solution = {}
    logger.info(f"found {len(phase1_solutions)} phase1 solutions")

    # find the phase2 solution for each phase1 solution
    for phase1_solution, (pt0_state, pt1_state, pt2_state, pt3_state, pt4_state) in phase1_solutions:
        self.state = original_state[:]
        self.solution = original_solution[:]

        for step in phase1_solution:
            self.rotate(step)

        # stage the LR centers
        phase2_pt_state_indexes = tuple([pt.state_index() for pt in self.lt_FB_centers_stage.prune_tables])
        pt_state_indexes.append(phase2_pt_state_indexes)
        phase2_pt_state_indexes_to_phase1_solution[phase2_pt_state_indexes] = phase1_solution

        # stage the LR centers and put them into one of 495 states solveable with L L' R R'
        phase2_pt_state_indexes = tuple(
            [pt.state_index() for pt in self.lt_FB_centers_stage_LR_centers_special.prune_tables]
        )
        pt_state_indexes_LR_centers_special.append(phase2_pt_state_indexes)
        phase2_pt_state_indexes_to_phase1_solution[phase2_pt_state_indexes] = phase1_solution

    self.state = original_state[:]
    self.solution = original_solution[:]

    # stage the FB centers
    phase2_solutions = self.lt_FB_centers_stage.solutions_via_c(pt_states=pt_state_indexes, solution_count=1)
    phase2_solution = phase2_solutions[0][0]

    # stage the FB centers and put LR centers into one of 495 states solveable with L L' R R'
    phase2_solutions_lr_centers_special = self.lt_FB_centers_stage_LR_centers_special.solutions_via_c(
        pt_states=pt_state_indexes_LR_centers_special, solution_count=1
    )
    phase2_solution_lr_centers_special = phase2_solutions_lr_centers_special[0][0]

    # if we can put the LR centers into one of 495 states without adding to the move count, make it so
    if len(phase2_solution_lr_centers_special) <= len(phase2_solution):
        min_phase2_solution, (
            pt0_state,
            pt1_state,
            pt2_state,
            pt3_state,
            pt4_state,
        ) = phase2_solutions_lr_centers_special[0]
        min_phase1_solution = phase2_pt_state_indexes_to_phase1_solution[pt0_state, pt1_state, pt2_state]
    else:
        min_phase2_solution, (pt0_state, pt1_state, pt2_state, pt3_state, pt4_state) = phase2_solutions[0]
        min_phase1_solution = phase2_pt_state_indexes_to_phase1_solution[pt0_state, pt1_state]

    logger.info(
        f"phase2 solution length {len(phase2_solution)}, phase2_lr_centers_special solution length {len(phase2_solution_lr_centers_special)}"
    )

    for step in min_phase1_solution:
        self.rotate(step)

    self.print_cube_add_comment("LR centers staged", tmp_solution_len)
    tmp_solution_len = len(self.solution)

    for step in min_phase2_solution:
        self.rotate(step)

    self.print_cube_add_comment("UD FB centers staged", tmp_solution_len) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def chain_corrections():\n \n #read the files\n sample_4m=read_sample(map_files('sample_4m'))\n empty_cell_4m=read_sample(map_files('empty_cell_4m'))\n empty_4m=read_sample(map_files('empty_4m'))\n transmission_sample_cell_4m=read_sample(map_files('trans_sample_4m'))\n transmission_empty_cell_4m=read_sample(map_files('trans_empty_cell_4m'))\n blocked_beam_4m=read_sample(map_files('blocked_4m'))\n sensitivity=read_div(map_files('div'))\n #mask=read_sample(map_files('mask'))\n \n #normalize the monitors\n \n sample_4m_norm=monitor_normalize(sample_4m)\n empty_cell_4m_norm=monitor_normalize(empty_cell_4m)\n transmission_sample_cell_4m_norm=monitor_normalize(transmission_sample_cell_4m)\n transmission_empty_cell_4m_norm=monitor_normalize(transmission_empty_cell_4m)\n empty_4m_norm=monitor_normalize(empty_4m)\n blocked_beam_4m_norm=monitor_normalize(blocked_beam_4m)\n \n #calculate q\n sample_4m_norm_q=convert_q(sample_4m_norm)\n empty_cell_4m_norm_q=convert_q(empty_cell_4m)\n blocked_beam_4m_norm_q=convert_q(blocked_beam_4m_norm)\n transmission_sample_cell_4m_norm_q=convert_q(transmission_sample_cell_4m_norm)\n transmission_empty_cell_4m_norm_q=convert_q(transmission_empty_cell_4m_norm)\n empty_4m_norm_q=convert_q(empty_4m_norm)\n \n \n print 'converted'\n #convert flatness\n sample_4m_solid=correct_solid_angle(sample_4m_norm_q)\n empty_cell_4m_solid=correct_solid_angle(empty_cell_4m_norm_q)\n blocked_beam_4m_solid=correct_solid_angle(blocked_beam_4m_norm_q)\n transmission_sample_cell_4m_solid=correct_solid_angle(transmission_sample_cell_4m_norm_q)\n transmission_empty_cell_4m_solid=correct_solid_angle(transmission_empty_cell_4m_norm_q)\n empty_4m_solid=correct_solid_angle(empty_4m_norm_q)\n \n \n #calculate transmission\n coord_left=(60,60)\n coord_right=(70,70)\n transmission_sample_cell_4m_rat=generate_transmission(transmission_sample_cell_4m_solid,empty_4m_solid,\n coord_left,coord_right)\n transmission_empty_cell_4m_rat=generate_transmission(transmission_empty_cell_4m_solid,empty_4m_solid,\n coord_left,coord_right)\n print 'Sample transmission= {} (IGOR Value = 0.724)'.format(transmission_sample_cell_4m_rat)\n print 'Empty Cell transmission= {} (IGOR Value = 0.929)'.format(transmission_empty_cell_4m_rat)\n print 'hi'\n \n #Initial Correction -- Not with the sub/mult tools,\n #SAM = sample_4m_solid.data\n #print SAM.x\n #EMP = empty_4m_solid.data\n #print \"EMP: \"\n #print EMP.x\n #BGD = blocked_beam_4m_solid.data\n #print \"BGD\"\n #print BGD.x\n #Tsam = transmission_sample_cell_4m_rat\n #Temp = transmission_empty_cell_4m_rat\n #COR1 = SAM.__sub__(BGD)\n #COR2 = (EMP.__sub__(BGD)).__mul__(Tsam/Temp)\n #COR = COR1.__sub__(COR2)\n #print \"after initial correction: \"\n #print COR.x\n \n SAM = sample_4m_solid\n print SAM.data.x\n EMP = empty_4m_solid\n print \"EMP: \"\n print EMP.data.x\n BGD = blocked_beam_4m_solid\n print \"BGD:\"\n print BGD.data.x\n Tsam = transmission_sample_cell_4m_rat\n Temp = transmission_empty_cell_4m_rat\n print \"COR1:\"\n COR1 = SAM.__sub1__(BGD)\n print COR1.data.x #check=works\n #-----Problems Here-------\n print \"COR2:\"\n COR2 = (EMP.__sub1__(BGD)) #check=works\n print COR2.data.x\n print \"COR3:\"\n #AJJ - __mul__ not working because Tsam and Temp are Measurement instances and not simply floats. 
See above.\n COR3 = COR2.__mul__(Tsam/Temp) #mul not working\n print COR3.data.x\n #COR = COR1.__sub1__(COR2)\n #print \"after initial correction: \"\n #print COR.x\n #COR2 = (EMP.__sub__(BGD)).__mul__(Tsam/Temp)\n #COR = COR1.__sub__(COR2)\n #print \"after initial correction: \"\n #print COR.data.x",
"def center_flows(L_wprime, U_wprime, L_w3, U_w3, L_overlap, U_overlap):\n # examine every possible point\n current_dist_to_edge = -1\n point = (0,0)\n #print(\"w3 range: [{}, {}]\".format(L_w3, U_w3))\n #print(\"w' range: [{}, {}]\".format(L_wprime, U_wprime))\n #print(\"overlap range: [{},{}]\".format(L_overlap, U_overlap))\n for y in range(L_w3, U_w3 + 1):\n #print(\"y={}\".format(y))\n LH_bound = max(L_wprime, L_overlap - y)\n #print(\"LH bound = {}\".format(LH_bound))\n RH_bound = min(U_wprime, U_overlap - y)\n #print(\"RH bound = {}\".format(RH_bound))\n for x in range(LH_bound, RH_bound + 1):\n # w3 UB: 0x + 1y - U_w3 = 0\n # w3 LB: 0x + 1y - L_w3 = 0\n # wprime UB: 1x + 0y - U_wprime\n # wprime LB: 1x + 0y - L_wprime\n # wprime + w3 UB: 1x + 1y - U_wprime,wk\n # wprime + w3 LB: 1x + 1y - L_wprime,wk\n dist_to_edge = min(distance_point_to_line(x, y, 0, -1, U_w3), #0x-1y+U_w3=0\n distance_point_to_line(x, y, 0, -1, L_w3), #0x-1y+L_w3=0\n # -1x + 0y + U_wprime = 0\n distance_point_to_line(x, y, -1, 0, U_wprime),\n # -1x + 0y + L_wprime = 0\n distance_point_to_line(x, y, -1, 0, L_wprime),\n # -1x - 1y + U_overlap = 0\n distance_point_to_line(x, y, -1, -1, U_overlap),\n # -1 x - y + L_overlap = 0\n distance_point_to_line(x, y, -1, -1, L_overlap))\n if dist_to_edge > current_dist_to_edge:\n #print(\"At point ({},{}), distance to edge increased from {} to {}.\"\\\n # .format(x,y,current_dist_to_edge,dist_to_edge))\n current_dist_to_edge = dist_to_edge\n point = (x,y)\n return(point)",
"def frame3dlin_Kg(E,A1,A2,L,Te1,Te2,R=None):\n Kge1= np.array([\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , -(A2*E)/(10*L) , 0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , -(A1*E)/(10*L)],\n [0 , 0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , (A2*E)/(10*L) , 0 , 0 , 0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , (A1*E)/(10*L) , 0],\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , 0 , (A2*E)/(10*L) , 0 , -((A2+3*A1)*E)/30 , 0 , 0 , 0 , -(A2*E)/(10*L) , 0 , ((A2+A1)*E)/60 , 0],\n [0 , -(A2*E)/(10*L) , 0 , 0 , 0 , -((A2+3*A1)*E)/30 , 0 , (A2*E)/(10*L) , 0 , 0 , 0 , ((A2+A1)*E)/60],\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , (A2*E)/(10*L) , 0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , (A1*E)/(10*L)],\n [0 , 0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , -(A2*E)/(10*L) , 0 , 0 , 0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , -(A1*E)/(10*L) , 0],\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , 0 , (A1*E)/(10*L) , 0 , ((A2+A1)*E)/60 , 0 , 0 , 0 , -(A1*E)/(10*L) , 0 , -((3*A2+A1)*E)/30 , 0],\n [0 , -(A1*E)/(10*L) , 0 , 0 , 0 , ((A2+A1)*E)/60 , 0 , (A1*E)/(10*L) , 0 , 0 , 0 , -((3*A2+A1)*E)/30]\n ])\n Kge2= np.array([\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , (A2*E)/(10*L) , 0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , (A1*E)/(10*L)],\n [0 , 0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , -(A2*E)/(10*L) , 0 , 0 , 0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , -(A1*E)/(10*L) , 0],\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , 0 , -(A2*E)/(10*L) , 0 , ((A2+3*A1)*E)/30 , 0 , 0 , 0 , (A2*E)/(10*L) , 0 , -((A2+A1)*E)/60 , 0],\n [0 , (A2*E)/(10*L) , 0 , 0 , 0 , ((A2+3*A1)*E)/30 , 0 , -(A2*E)/(10*L) , 0 , 0 , 0 , -((A2+A1)*E)/60],\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , -(A2*E)/(10*L) , 0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , 0 , 0 , -(A1*E)/(10*L)],\n [0 , 0 , -((3*A2+3*A1)*E)/(5*L**2) , 0 , (A2*E)/(10*L) , 0 , 0 , 0 , ((3*A2+3*A1)*E)/(5*L**2) , 0 , (A1*E)/(10*L) , 0],\n [0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0],\n [0 , 0 , -(A1*E)/(10*L) , 0 , -((A2+A1)*E)/60 , 0 , 0 , 0 , (A1*E)/(10*L) , 0 , ((3*A2+A1)*E)/30 , 0],\n [0 , (A1*E)/(10*L) , 0 , 0 , 0 , -((A2+A1)*E)/60 , 0 , -(A1*E)/(10*L) , 0 , 0 , 0 , ((3*A2+A1)*E)/30]])\n\n Kg = Kge1*Te1 + Kge2*Te2\n\n if (R is not None):\n RR = scipy.linalg.block_diag(R,R,R,R)\n Kg = np.transpose(RR).dot(Kg.dot(RR))\n\n return Kg",
"def analyze_orbit_corrector(OC1, OC2, beamline, phase_beg):\n\n M = np.identity(4)\n OC_parameters = np.zeros(4)\n\n for element in beamline:\n M = np.dot(element.M1, M)\n\n # Since the X and Y are decoupled, we can treat them separately.\n M_x = M[0:2, 0:2]\n M_y = M[2:4, 2:4]\n\n L1 = [[OC1.length/2], [1]]\n L2 = [[OC2.length/2], [1]]\n\n M_OC1 = np.array(OC1.M1)[0:2, 0:2]\n M_OC2 = np.array(OC2.M1)[0:2, 0:2]\n\n # The following part solve the cx_1 and cx_2\n M1_x = np.linalg.multi_dot([M_OC2, M_x, L1])\n M2_x = np.linalg.multi_dot([M_OC2, M_x, M_OC1])\n M_OC_x = np.hstack((M1_x, L2))\n\n OC_parameters[0:2] = -np.linalg.multi_dot([np.linalg.inv(M_OC_x), M2_x, phase_beg[0:2]])\n # The end of the X-part\n\n # The following part solve the cy_1 and cy_2\n M1_y = np.linalg.multi_dot([M_OC2, M_y, L1])\n M2_y = np.linalg.multi_dot([M_OC2, M_y, M_OC1])\n M_OC_y = np.hstack((M1_y, L2))\n\n OC_parameters[2:4] = -np.linalg.multi_dot([np.linalg.inv(M_OC_y), M2_y, phase_beg[2:4]])\n # The end of the Y-part\n\n\n return OC_parameters",
"def linear_LS_triangulation(u1, P1, u2, P2):\n A = np.zeros((4, 3))\n b = np.zeros((4, 1))\n\n # Create array of triangulated points\n x = np.zeros((3, len(u1)))\n\n # Initialize C matrices\n C1 = np.array(linear_LS_triangulation_C)\n C2 = np.array(linear_LS_triangulation_C)\n\n for i in range(len(u1)):\n # Derivation of matrices A and b:\n # for each camera following equations hold in case of perfect point matches:\n # u.x * (P[2,:] * x) = P[0,:] * x\n # u.y * (P[2,:] * x) = P[1,:] * x\n # and imposing the constraint:\n # x = [x.x, x.y, x.z, 1]^T\n # yields:\n # (u.x * P[2, 0:3] - P[0, 0:3]) * [x.x, x.y, x.z]^T + (u.x * P[2, 3] - P[0, 3]) * 1 = 0\n # (u.y * P[2, 0:3] - P[1, 0:3]) * [x.x, x.y, x.z]^T + (u.y * P[2, 3] - P[1, 3]) * 1 = 0\n # and since we have to do this for 2 cameras, and since we imposed the constraint,\n # we have to solve 4 equations in 3 unknowns (in LS sense).\n #\n # Build C matrices, to construct A and b in a concise way\n C1[:, 2] = u1[i, :]\n C2[:, 2] = u2[i, :]\n\n # Build A matrix:\n # [\n # [ u1.x * P1[2,0] - P1[0,0], u1.x * P1[2,1] - P1[0,1], u1.x * P1[2,2] - P1[0,2] ],\n # [ u1.y * P1[2,0] - P1[1,0], u1.y * P1[2,1] - P1[1,1], u1.y * P1[2,2] - P1[1,2] ],\n # [ u2.x * P2[2,0] - P2[0,0], u2.x * P2[2,1] - P2[0,1], u2.x * P2[2,2] - P2[0,2] ],\n # [ u2.y * P2[2,0] - P2[1,0], u2.y * P2[2,1] - P2[1,1], u2.y * P2[2,2] - P2[1,2] ]\n # ]\n A[0:2, :] = C1.dot(P1[0:3, 0:3]) # C1 * R1\n A[2:4, :] = C2.dot(P2[0:3, 0:3]) # C2 * R2\n\n # Build b vector:\n # [\n # [ -(u1.x * P1[2,3] - P1[0,3]) ],\n # [ -(u1.y * P1[2,3] - P1[1,3]) ],\n # [ -(u2.x * P2[2,3] - P2[0,3]) ],\n # [ -(u2.y * P2[2,3] - P2[1,3]) ]\n # ]\n b[0:2, :] = C1.dot(P1[0:3, 3:4]) # C1 * t1\n b[2:4, :] = C2.dot(P2[0:3, 3:4]) # C2 * t2\n b *= -1\n\n # Solve for x vector\n cv2.solve(A, b, x[:, i:i + 1], cv2.DECOMP_SVD)\n\n return np.transpose(x), np.ones(len(u1), dtype=bool)",
"def phase_blocks(posBlock, GTblock, RefBlock, FlagB):\n blockSameCount = 0\n blockReverseCount = 0\n GTblockPhase = []\n GTblockReturn = []\n\n for i in range(len(GTblock)):\n GT = GTblock[i]\n RefGT = RefBlock[i]\n if FlagB[i] == \"FV\": # uncertain variants are set to N\n GTblock[i] = ['N', 'N']\n else: # find and count cases when phased genotype is consistent/inconsistent with parental genotypes\n GTphase = phase_state(GT, RefGT)\n if GTphase == 'same':\n blockSameCount += 1\n GTblockPhase.append('same')\n elif GTphase == 'reverse':\n blockReverseCount += 1\n GTblockPhase.append('reverse')\n\n # find prevalent phase \n if all_same(GTblockPhase) and (len(GTblockPhase) >= 2): # absolutely consistent with parental genotypes\n if GTblockPhase[0] == ['same']:\n RSratio = 1.0\n else:\n RSratio = 0.0\n RSratio = 0.0\n elif GTblockPhase == []: # phase unknown\n RSratio = 'NA'\n else:\n RSratio = float(blockSameCount)/float(blockSameCount+blockReverseCount) # proportion of 'same' phasing state in block strings\n\n # define the block phase and produce output\n if (RSratio == 'NA') or (RSratio < 0.90 and RSratio > 0.10): # discard block that have > 90% of inconsistency with parental reference genotypes, or \n for j in range(len(GTblock)):\n posBlockPrint = posBlock[j]\n GTblockPrint1 = 'N'\n GTblockPrint2 = 'N'\n GTblockReturn.append([posBlockPrint[0], posBlockPrint[1], GTblockPrint1, GTblockPrint2])\n else: # phase according to the prevalent state\n # find prevalent state\n phaseStateNumber = max(map(GTblockPhase.count, GTblockPhase))\n GTblockDefinedPahse = list(set( i for i in GTblockPhase if GTblockPhase.count(i) == phaseStateNumber ))\n if len(GTblockDefinedPahse) == 1: # check if one state is prevalent\n if GTblockDefinedPahse == ['same']:\n phaseState = [0,1]\n else:\n phaseState = [1,0]\n for j in range(len(GTblock)):\n GT = GTblock[j]\n posBlockPrint = posBlock[j]\n GTblockPrint1 = GT[phaseState[0]]\n GTblockPrint2 = GT[phaseState[1]]\n GTblockReturn.append([posBlockPrint[0], posBlockPrint[1], GTblockPrint1, GTblockPrint2])\n else: # if there is conflict in phasing state, set to Ns. It usually applies for blocks with less then 10 position overlaps with parental reference.\n for j in range(len(GTblock)):\n posBlockPrint = posBlock[j]\n GTblockPrint1 = 'N'\n GTblockPrint2 = 'N'\n GTblockReturn.append([posBlockPrint[0], posBlockPrint[1], GTblockPrint1, GTblockPrint2])\n phaseState = [0,1]\n\n return(GTblockReturn, RSratio)",
"def __init__(self, \n nd = 2, \n goal = np.array([1.0,1.0]),\n state_bound = [[0,1],[0,1]],\n nA = 4,\n action_list = [[0,1],[0,-1],[1,0],[-1,0]],\n<<<<<<< HEAD:archive-code/puddleworld.py\n ngrid = [10.0,10.0],\n maxStep = 40):\n ngrid = [40, 40]\n x_vec = np.linspace(0,1,ngrid[0])\n y_vec = np.linspace(0,1,ngrid[1])\n for x in x_vec:\n for y in y_vec:\n if ~self.inPuddle([x,y]):\n puddle.append([x,y])\n # puddle is a closed loop \n outpuddlepts = np.asarray(puddle)\n \"\"\"\n\n\n # Horizontal wing of puddle consists of \n # 1) rectangle area xch1<= x <=xc2 && ych1-radius <= y <=ych2+radius\n # (xchi,ychi) is the center points (h ==> horizantal)\n # x, y = state[0], state[1]\n xch1, ych1 = 0.3, 0.7\n xch2, ych2 = 0.65, ych1\n radius = 0.1\n\n\n #Vertical wing of puddle consists of \n # 1) rectangle area xcv1-radius<= x <=xcv2+radius && ycv1 <= y <= ycv2\n # where (xcvi,ycvi) is the center points (v ==> vertical)\n xcv1 = 0.45; ycv1=0.4;\n xcv2 = xcv1; ycv2 = 0.8;\n\n # % 2) two half-circle at end edges of rectangle\n \n # POINTS ON HORIZANTAL LINES OF PUDDLE BOUNDARY\n for x in np.arange(xch1,xcv1-radius,self.meshsize[0]/2):\n puddle.append([x,ych1-radius])\n puddle.append([xcv1-radius,ych1-radius])\n \n for x in np.arange(xcv1+radius,xch2,self.meshsize[0]/2):\n puddle.append([x,ych1-radius])\n \n for x in np.arange(xch1,xcv1-radius,self.meshsize[0]/2):\n puddle.append([x,ych1+radius])\n \n puddle.append([xcv1-radius,ych1+radius])\n\n\n for x in np.arange(xcv1+radius,xch2,self.meshsize[0]/2):\n puddle.append([x,ych1+radius])\n\n # POINTS ON VERTICAL LINES OF PUDDLE BOUNDARY\n for y in np.arange(ycv1,ych1-radius,self.meshsize[1]/2):\n puddle.append([xcv1-radius,y])\n \n for y in np.arange(ycv1,ych1-radius,self.meshsize[1]/2):\n puddle.append([xcv1+radius,y])\n \"\"\"\n for y in np.arrange():\n puddle.append([])\n \n for y in np.arrange():\n puddle.append([])\n \"\"\"\n\n # HALF CIRCLES\n ngridTheta = 10\n thetaVec = np.linspace(0,pi,ngridTheta)\n\n for t in thetaVec:\n puddle.append([xch1+radius*np.cos(pi/2+t),ych1+radius*np.sin(pi/2+t)])\n\n for t in thetaVec:\n puddle.append([xch2+radius*np.cos(-pi/2+t),ych2+radius*np.sin(-pi/2+t)])\n\n for t in thetaVec:\n puddle.append([xcv1+radius*np.cos(pi+t),ycv1+radius*np.sin(pi+t)])\n\n for t in thetaVec:\n puddle.append([xcv2+radius*np.cos(t),ycv2+radius*np.sin(t)])\n\n \n outpuddlepts = np.asarray(puddle)\n return outpuddlepts",
"def stitch(KPS1, KPS2, H1, H2, match): #---- stich image to previous one\r\n #--- projection image1 from plane to cylindrical ---\r\n total = np.minimum(match.shape[0],100); # total pairing number\r\n bin1 = match[0:total,0].astype(int); # feature no at image 1\r\n R1 = KPS1.keyz[bin1, 0]; # keypoint Y at image 1\r\n C1 = KPS1.keyz[bin1, 1]; # keypoint X at image 1\r\n V1, U1 = pano_tools.project_p2c_points(R1, C1, H1);\r\n #--- image 2 ---\r\n bin2 = match[0:total,1].astype(int); # feature no at image 2\r\n R2 = KPS2.keyz[bin2, 0]; # keypoint Y at image 2\r\n C2 = KPS2.keyz[bin2, 1]; # keypoint X at image 2\r\n Rc2 = H2[0]/2; Rp2= R2 - Rc2; \r\n Cc2 = H2[1]/2; Cp2= C2 - Cc2;\r\n #--- --- \r\n # {phi1,S1,TU1,TV1} = M*M matrix: which is derived by chosen 2 pairs \r\n # {phi0,S0,TU0,TV0} = scalar: which is initial guess by removing outlier\r\n # \r\n phi1,S1,TU1,TV1= pano_tools.derive_p2c_formula(U1,V1,Cp2,Rp2);\r\n seq,phi0,S0,TU0,TV0 = pano_tools.remove_ill_matched_pair(phi1,S1,TU1,TV1); \r\n #--- linear regression [not necessary] ---\r\n # U1X = U1[seq]; C2X = C2[seq]; V1X = V1[seq]; R2X = R2[seq]; \r\n # phi0,S0,TU0,TV0,Err= pano_tools.linear_regression(V1X,U1X,R2X,C2X, phi0,S0,TU0,TV0,H2)\r\n H2[3]= phi0; H2[4]= S0; H2[5]= TV0; H2[6]= TU0;",
"def combine_phase(laz, raz, grf_lf_ind, grf_rf_ind, hz, acc_hip_z, acc_hip_x, total_accel):\n # reshape for faster computation\n laz = laz.values.reshape(-1, )\n raz = raz.values.reshape(-1, )\n\n # Check and mark rows with missing data\n length = len(laz)\n missing_data = False\n nan_row = []\n if np.isnan(laz).any() or np.isnan(raz).any():\n missing_data = True\n if missing_data:\n nan_row = np.where(np.isnan(laz) | np.isnan(raz))[0]\n finite_row = np.array(list(set(range(length)) - set(nan_row)))\n laz = np.delete(laz, nan_row, )\n raz = np.delete(raz, nan_row, )\n\n # Filter through low-pass filter\n la_magn = filter_data(laz, filt='low', highcut=ct.cutoff_magn, fs=hz)\n ra_magn = filter_data(raz, filt='low', highcut=ct.cutoff_magn, fs=hz)\n\n acc_hip_z = filter_data(acc_hip_z, filt='low', highcut=6)\n acc_hip_x = filter_data(acc_hip_x, filt='low', highcut=40)\n acc_hip = filter_data(total_accel, filt='low', highcut=15)\n\n # Get balance/movement phase and start and end of movement phase for both\n # right and left foot\n lf_ph, lf_sm, lf_em = _body_phase(la_magn, hz)\n rf_ph, rf_sm, rf_em = _body_phase(ra_magn, hz)\n\n _impact_detect(phase=lf_ph,\n start_move=lf_sm,\n end_move=lf_em,\n grf=grf_lf_ind,\n acc_hip_z=acc_hip_z,\n acc_hip_x=acc_hip_x,\n acc_hip=acc_hip) # detect and add impacts\n del lf_sm, lf_em # no use in further computations\n\n _impact_detect(phase=rf_ph,\n start_move=rf_sm,\n end_move=rf_em,\n grf=grf_rf_ind,\n acc_hip_z=acc_hip_z,\n acc_hip_x=acc_hip_x,\n acc_hip=acc_hip) # detect and add impacts\n del rf_sm, rf_em, raz # no use in further computations\n\n # Insert previous value for phase where data needed to predict was missing\n if missing_data:\n phase_lf = np.ones(length).astype(int)\n phase_lf[finite_row] = lf_ph\n phase_rf = np.ones(length).astype(int)\n phase_rf[finite_row] = rf_ph\n for i in nan_row:\n phase_lf[i] = phase_lf[i - 1]\n phase_rf[i] = phase_rf[i - 1]\n else:\n phase_lf, phase_rf = lf_ph, rf_ph\n\n return phase_lf, phase_rf",
"def F_trans(self):\n rho_H1 = self.edp_par['rho_H1'].value\n Z_H1 = self.edp_par['Z_H1'].value\n sigma_H1 = self.edp_par['sigma_H1'].value\n rho_H2 = self.edp_par['rho_H2'].value\n Z_H2 = self.edp_par['Z_H2'].value\n sigma_H2 = self.edp_par['sigma_H2'].value\n rho_M = self.edp_par['rho_M'].value\n sigma_M = self.edp_par['sigma_M'].value\n psi = self.edp_par['psi'].value \n common_scale = self.edp_par['common_scale'].value\n \n \n # Make sure Z_H2 > Z_H1. If Z_H2 < Z_H1, swap them\n if Z_H1 > Z_H2:\n Z_H1, Z_H2 = Z_H2, Z_H1\n sigma_H1, sigma_H2 = sigma_H2, sigma_H1\n rho_H1, rho_H2 = rho_H2, rho_H1\n \n # Calculate the intermediate variables\n alpha = self.qz*cos(psi) - self.qx*sin(psi)\n Z_CH2 = Z_H1 - sigma_H1\n Z_W = Z_H2 + sigma_H2\n DeltaZ_H = Z_W - Z_CH2\n \n # Calculate the Gaussian part \n FG = -rho_M*sigma_M * exp(-0.5*(alpha*sigma_M)**2)\n FG += 2*rho_H1*sigma_H1 * cos(alpha*Z_H1) * exp(-0.5*(alpha*sigma_H1)**2)\n FG += 2*rho_H2*sigma_H2 * cos(alpha*Z_H2) * exp(-0.5*(alpha*sigma_H2)**2)\n FG *= np.sqrt(2*pi)\n \n # Calculate the strip part\n FS = -2 * sin(alpha*Z_CH2) / alpha\n \n # Calculate the bridging part\n FB = 1 / (alpha + pi/DeltaZ_H)\n FB += 1 / (alpha - pi/DeltaZ_H)\n FB *= sin(alpha*Z_W) + sin(alpha*Z_CH2)\n FB *= 0.5\n FB -= (sin(alpha*Z_W)-sin(alpha*Z_CH2)) / alpha\n \n return common_scale * (FG + FS + FB)",
"def processPhaseHeight(self, phasesInRing1, phasesInRing2):\n P11, P12, P21, P22 = ([] for i in range(4))\n phaseHeightDictionary = {}\n\n [P11.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index < 2]\n [P12.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index >= 2 and index < 4]\n [P21.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index >= 4 and index < 6]\n [P22.append(index+1)for index, value in enumerate(self.phaseDurationList)\n if value > 0.0 and index >= 6 and index < 8]\n\n if (len(P11) == len(P21)):\n for index in range(len(P11)):\n if len(P11) > 0:\n phaseHeightDictionary[str(P11[index])] = 10\n\n for index in range(len(P21)):\n if len(P21) > 0:\n phaseHeightDictionary[str(P21[index])] = 10\n\n elif (len(P11) < len(P21)):\n for index in range(len(P11)):\n if len(P11) > 0:\n phaseHeightDictionary[str(P11[index])] = 20\n\n for index in range(len(P21)):\n if len(P21) > 0:\n phaseHeightDictionary[str(P21[index])] = 10\n\n elif (len(P11) > len(P21)):\n for index in range(len(P11)):\n if len(P11) > 0:\n phaseHeightDictionary[str(P11[index])] = 10\n\n for index in range(len(P21)):\n if len(P21) > 0:\n phaseHeightDictionary[str(P21[index])] = 20\n\n if (len(P12) == len(P22)):\n for index in range(len(P12)):\n if len(P12) > 0:\n phaseHeightDictionary[str(P12[index])] = 10\n\n for index in range(len(P22)):\n if len(P22) > 0:\n phaseHeightDictionary[str(P22[index])] = 10\n\n elif (len(P12) < len(P22)):\n for index in range(len(P12)):\n if len(P12) > 0:\n phaseHeightDictionary[str(P12[index])] = 20\n for index in range(len(P22)):\n if len(P22) > 0:\n phaseHeightDictionary[str(P22[index])] = 10\n\n elif (len(P12) > len(P22)):\n for index in range(len(P12)):\n if len(P12) > 0:\n phaseHeightDictionary[str(P12[index])] = 10\n for index in range(len(P22)):\n if len(P22) > 0:\n phaseHeightDictionary[str(P22[index])] = 20\n\n for phase in phasesInRing1:\n for key, value in phaseHeightDictionary.items():\n if int(key) == phase:\n self.phaseHeightInRing1.append(value)\n\n for phase in phasesInRing2:\n for key, value in phaseHeightDictionary.items():\n if int(key) == phase:\n self.phaseHeightInRing2.append(value)",
"def monolayer_4band():\n a = 0.222\n ax = 0.438\n ay = 0.332\n theta = 96.79 * (pi / 180)\n phi = 103.69 * (pi / 180)\n\n lat = pb.Lattice(a1=[ax, 0], a2=[0, ay])\n\n h = a * sin(phi - pi / 2)\n s = 0.5 * ax - a * cos(theta / 2)\n lat.add_sublattices(\n ('A', [0, 0, h], 0),\n ('B', [s, 0, 0], 0),\n ('C', [ax/2, ay/2, 0], 0),\n ('D', [ax/2 + s, ay/2, h], 0)\n )\n\n lat.register_hopping_energies({\n 't1': -1.22,\n 't2': 3.665,\n 't3': -0.205,\n 't4': -0.105,\n 't5': -0.055\n })\n\n lat.add_hoppings(\n # t1\n ([-1, 0], 'A', 'D', 't1'),\n ([-1, -1], 'A', 'D', 't1'),\n ([ 0, 0], 'B', 'C', 't1'),\n ([ 0, -1], 'B', 'C', 't1'),\n # t2\n ([ 0, 0], 'A', 'B', 't2'),\n ([ 0, 0], 'C', 'D', 't2'),\n # t3\n ([ 0, 0], 'A', 'D', 't3'),\n ([ 0, -1], 'A', 'D', 't3'),\n ([ 1, 1], 'C', 'B', 't3'),\n ([ 1, 0], 'C', 'B', 't3'),\n # t4\n ([ 0, 0], 'A', 'C', 't4'),\n ([ 0, -1], 'A', 'C', 't4'),\n ([-1, 0], 'A', 'C', 't4'),\n ([-1, -1], 'A', 'C', 't4'),\n ([ 0, 0], 'B', 'D', 't4'),\n ([ 0, -1], 'B', 'D', 't4'),\n ([-1, 0], 'B', 'D', 't4'),\n ([-1, -1], 'B', 'D', 't4'),\n # t5\n ([-1, 0], 'A', 'B', 't5'),\n ([ 0, 1], 'A', 'B', 't5'),\n ([ 0, -1], 'A', 'B', 't5'),\n ([-1, 0], 'C', 'D', 't5'),\n ([ 0, 1], 'C', 'D', 't5'),\n ([ 0, -1], 'C', 'D', 't5'),\n )\n\n return lat",
"def eo_edges(self):\n logger.info(\"eo_edges called\")\n permutations = []\n original_state = self.state[:]\n original_solution = self.solution[:]\n tmp_solution_len = len(self.solution)\n\n # Build a list of the wing strings at each midge\n wing_strs = []\n\n for _, square_index, partner_index in midges_recolor_tuples_555:\n square_value = self.state[square_index]\n partner_value = self.state[partner_index]\n wing_str = square_value + partner_value\n wing_str = wing_str_map[square_value + partner_value]\n wing_strs.append(wing_str)\n\n # build a list of all possible EO permutations...an even number of edges must be high\n for num in range(4096):\n num = str(bin(num)).lstrip(\"0b\").zfill(12)\n if num.count(\"1\") % 2 == 0:\n permutations.append(list(map(int, num)))\n\n # Put all 2048 starting states in a file and point ida-via-graph\n # at the file so it can solve all of them and apply the one that is the shortest.\n lr_center_stage_states = []\n eo_outer_orbit_states = []\n eo_inner_orbit_states = []\n\n for permutation in permutations:\n must_be_uppercase = []\n must_be_lowercase = []\n self.state = original_state[:]\n\n for wing_str, uppercase in zip(wing_strs, permutation):\n if uppercase:\n must_be_uppercase.append(wing_str)\n else:\n must_be_lowercase.append(wing_str)\n\n # logger.info(\"%s: %s permutation %s\" % (self, index, \"\".join(map(str, permutation))))\n self.edges_flip_orientation(must_be_uppercase, must_be_lowercase)\n\n # build lists of the states that we need to find state_indexes for\n lr_center_stage_states.append(self.lt_phase3_lr_center_stage.state())\n eo_outer_orbit_states.append(self.lt_phase3_eo_outer_orbit.state())\n eo_inner_orbit_states.append(self.lt_phase3_eo_inner_orbit.state())\n\n # now we have a huge list of states to lookup, do a binary search on multiple states at once (this is drastically faster\n # than binary searching for them individually). state_index_multiple() will return a dict where the state is the key\n # and the state_index is the value.\n lr_center_stage_eo_inner_orbit_state_indexes = self.lt_phase3_lr_center_stage.state_index_multiple(\n lr_center_stage_states\n )\n eo_outer_orbit_state_indexes = self.lt_phase3_eo_outer_orbit.state_index_multiple(eo_outer_orbit_states)\n eo_inner_orbit_state_indexes = self.lt_phase3_eo_inner_orbit.state_index_multiple(eo_inner_orbit_states)\n\n # build a list of tuples of the state indexes\n pt_state_indexes = []\n for lr_center_stage_eo_inner_orbit_state, eo_outer_orbit_state, eo_inner_orbit_state in zip(\n lr_center_stage_states, eo_outer_orbit_states, eo_inner_orbit_states\n ):\n pt_state_indexes.append(\n (\n lr_center_stage_eo_inner_orbit_state_indexes[lr_center_stage_eo_inner_orbit_state],\n eo_outer_orbit_state_indexes[eo_outer_orbit_state],\n eo_inner_orbit_state_indexes[eo_inner_orbit_state],\n )\n )\n\n self.state = original_state[:]\n self.solution = original_solution[:]\n\n # When solve_via_c is passed pt_state_indexes (2048 lines of states in this case), it will try all 2048 of them\n # to find the state that has the shortest solution.\n self.lt_phase3.solve_via_c(pt_states=pt_state_indexes)\n\n self.print_cube_add_comment(\"edges EOed into high/low groups\", tmp_solution_len)\n self.post_eo_state = self.state[:]\n self.post_eo_solution = self.solution[:]\n\n # re-color the cube so that the edges are oriented correctly so we can\n # pair 4-edges then 8-edges. 
After all edge pairing is done we will uncolor\n # the cube and re-apply the solution.\n self.edges_flip_orientation(wing_strs, [])\n self.highlow_edges_print()",
"def do_BA2(kp_3d, kp_2d, des, comp_list, H, map_3d, map_2d, map_des, map_cam, map_view, my_update, col, col2, my_max, BA=0):\n # Setting the Format of inputs for using BA modules\n camera_params, points_3d, points_2d, camera_ind, points_ind, final_l1, final_l2, low_bound, up_bound, map_des, map_2d = get_things1(kp_3d, kp_2d, des, comp_list, H, map_3d, map_2d, map_des, map_cam, map_view, my_max)\n n_cameras = camera_params.shape[0]\n n_points = points_3d.shape[0]\n n = 9 * n_cameras + 3 * n_points\n m = 2 * points_2d.shape[0]\n # Optimisation Variable\n x0 = np.hstack((camera_params.ravel(), points_3d[:, 0:3].ravel()))\n resx = x0.copy()\n if(BA==1):\n # Standard BA Module\n f0 = fun(x0, n_cameras, n_points, camera_ind, points_ind, points_2d[:,:2], points_2d[:,2])\n A = bundle_adjustment_sparsity(n_cameras, n_points, camera_ind, points_ind)\n t0 = time.time()\n\n res = least_squares(fun, x0, jac_sparsity=A, bounds=(low_bound, up_bound), verbose=2, x_scale='jac', ftol=1e-4, method='trf',\n args=(n_cameras, n_points, camera_ind, points_ind, points_2d[:,:2], points_2d[:,2]))\n t1 = time.time()\n\n resx = res.x\n # Updating the Map with updated points and transformations\n my_min = 0\n my_max = np.max(camera_ind)+1\n H_op = np.zeros((3,4))\n H_op[0:3,0:3] = R.from_rotvec(resx[(my_max-1)*9:(my_max-1)*9+3]).as_matrix()\n H_op[0:3,3] = resx[(my_max-1)*9+3:(my_max-1)*9+6] # Updating the final transformation\n \n final_pts = np.array(resx[my_max*9:]).reshape(-1,3)\n ini_pts = np.array(x0[my_max*9:]).reshape(-1,3)\n map_view = np.vstack((map_view,resx[(my_max-1)*9:(my_max-1)*9+6])) # Updating Transformations in the map\n\n for i in range(my_min,my_max-1):\n map_view[i] = resx[i*9 : i*9+6]\n update_list = []\n count = 0\n count1 = 0\n for i in range(len(final_l1)):\n # Identifying the Map points\n if(final_l2[i]==1):\n update_list.append(final_l1[i])\n if(final_l2[i]==0):\n count1 += 1\n err = np.sqrt(np.sum(np.square((final_pts[points_ind[i]] - ini_pts[points_ind[i]]).ravel()))/3)\n map_3d[final_l1[i]] = final_pts[points_ind[i]] # Updating the map points\n if(np.max(map_cam[final_l1[i]])!=my_max-1):\n map_cam[final_l1[i]].append(my_max-1) # Updating the map views\n count +=1\n \n # Adding the Notseen points to the Map\n update_list = np.array(update_list)\n l2 = np.unique(np.sort(update_list))\n if(my_update==1):\n l1 = []\n l2 = []\n new_3d = []\n new_2d = []\n new_cam = []\n new_view = []\n new_des = []\n new_col = []\n l2 = np.unique(np.sort(update_list))\n j = 0\n for i in range(len(kp_2d)):\n if(i == l2[j]):\n j += 1\n if(j==len(l2)):\n j = 0\n else:\n pt = (np.linalg.inv(H_op[0:3,0:3])@(kp_3d[i].T - H_op[:,3]))\n new_3d.append(pt)\n new_2d = []\n new_cam = []\n new_des.append(des[i])\n new_2d.append(kp_2d[i])\n new_cam.append(my_max-1)\n new_col.append(col2[i])\n map_2d.append(new_2d)\n map_cam.append(new_cam)\n\n new_3d = np.array(new_3d)\n new_des = np.array(new_des)\n new_col = np.array(new_col)\n map_3d = np.vstack((map_3d,new_3d))\n map_des = np.vstack((map_des,new_des))\n col = np.vstack((col,new_col))\n\n return H_op, map_3d, map_2d, map_des, map_cam, map_view, col, my_max-1, len(l2)",
"def update_chains(self):\r\n _, black_positions, white_positions = self.get_positions()\r\n\r\n self.bfs(black_positions, 1)\r\n self.bfs(white_positions, 2)",
"def normal_modes_gHST(R, NL, KL, params, dispersion=[], spin_dir=[], sublattice_labels=[], b='hang', spring='auto',\n pin='auto'):\n try:\n NP, NN = np.shape(NL)\n except:\n '''There is only one particle.'''\n NP = 1\n NN = 0\n\n M1 = np.zeros((2 * NP, 2 * NP))\n M2 = np.zeros((2 * NP, 2 * NP))\n if spring == 'auto':\n spring = params['k'] * params['l'] ** 2 / (params['I3'] * np.abs(params['w3']))\n # If there is more than one particle, and if the speeds vary from particle to particle,\n # then make spring the same length as a dynamical matrix column\n if len(spring) > 0:\n if (abs(spring - spring[0]) > 1e-9).any():\n # The rotation rates vary from particle to particle, so reshape\n spring_new = np.zeros_like(spring)\n dmyi = 0 # a new index ('dummy i')\n for ii in range(NP):\n # Since 2 dof for position of pivot of gHST, double the size\n spring_new[dmyi] = spring[ii]\n spring_new[dmyi + 1] = spring[ii]\n dmyi += 2\n else:\n # the elements are all identical, so just keep the first one\n spring = spring[0]\n\n if pin == 'auto':\n gn = params['Mm'] * params['g']\n pin = params['l'] * gn / (params['I3'] * np.abs(params['w3']))\n # If there is more than one particle, and if the speeds vary from particle to particle,\n # then make pin the same length as a dynamical matrix column\n if len(pin) > 0:\n if (abs(pin - pin[0]) > 1e-9).any():\n # The rotation rates vary from particle to particle, so reshape\n pin_new = np.zeros_like(pin)\n dmyi = 0 # a new index ('dummy i')\n for ii in range(NP):\n # Since 2 dof for position of pivot of gHST, double the size\n pin_new[dmyi] = pin[ii]\n pin_new[dmyi + 1] = pin[ii]\n dmyi += 2\n else:\n # the elements are all identical, so just keep the first one\n pin = pin[0]\n\n m2_shape = np.shape(M2)\n\n if b == 'hang':\n b = np.zeros(NP)\n elif b == 'stand':\n b = np.ones(NP)\n\n if spin_dir == []:\n '''Assume antialigned with a, aligned with body axis 3'''\n spin_dir = np.ones(NP)\n\n print 'Constructing dynamical matrix...'\n for i in range(NP):\n for nn in range(NN):\n\n ni = NL[i, nn] # the number of the gyroscope i is connected to (particle j)\n k = KL[i, nn] # true connection?\n\n if len(dispersion) > 1:\n disp = 1. / (1. + dispersion[i])\n else:\n disp = 1.\n\n diffx = R[ni, 0] - R[i, 0]\n diffy = R[ni, 1] - R[i, 1]\n alphaij = 0.\n\n rij_mag = np.sqrt(diffx ** 2 + diffy ** 2)\n\n if k != 0:\n alphaij = np.arctan2(diffy, diffx)\n\n # for periodic systems, KL is -1 for particles on opposing boundaries\n if KL[i, nn] == -1:\n alphaij = (np.pi + alphaij) % (2 * pi)\n\n # What is this for?\n if KL[i, nn] == -2: # will only happen on first or last gyro in a line\n if i == 0 or i == (NP - 1):\n print i, '--> NL=-2 for this particle'\n yy = np.where(KL[i] == 1)\n dx = R[NL[i, yy], 0] - R[NL[i, yy], 0]\n dy = R[NL[i, yy], 1] - R[NL[i, yy], 1]\n al = (np.arctan2(dy, dx)) % (2 * pi)\n alphaij = np.pi - al\n if i == 1:\n alphaij = np.pi - ((90 / 2) * np.pi / 180.)\n else:\n alphaij = - ((90 / 2) * np.pi / 180.)\n\n Cos = np.cos(alphaij)\n Sin = np.sin(alphaij)\n\n if abs(Cos) < 10E-8:\n Cos = 0.0\n\n if abs(Sin) < 10E-8:\n Sin = 0\n\n Cos2 = Cos ** 2\n Sin2 = Sin ** 2\n CosSin = Cos * Sin\n\n # -1 for aligned with a, 1 for aligned with 3.\n # dir factor :== 1/(-1)^c = (-1)^c\n dir_factor = spin_dir[i]\n\n if len(sublattice_labels) > 0:\n if sublattice_labels[i] == 1:\n extra_factor = 1. 
* del_A_B\n # print self.del_A_B\n elif sublattice_labels[i] == 0:\n extra_factor = 1.\n else:\n extra_factor = 1.\n else:\n extra_factor = 1.\n\n M1[2 * i, 2 * i] += -disp * k * CosSin * ((-1) ** b[i]) * dir_factor # dxi - dxi\n M1[2 * i, 2 * i + 1] += -disp * k * Sin2 * ((-1) ** b[i]) * dir_factor # dxi - dyi\n M1[2 * i, 2 * ni] += disp * k * CosSin * ((-1) ** b[i]) * dir_factor # dxi - dxj\n M1[2 * i, 2 * ni + 1] += disp * k * Sin2 * ((-1) ** b[i]) * dir_factor # dxi - dyj\n\n # (y components)\n M1[2 * i + 1, 2 * i] += disp * k * Cos2 * ((-1) ** b[i]) * dir_factor # dyi - dxi\n M1[2 * i + 1, 2 * i + 1] += disp * k * CosSin * ((-1) ** b[i]) * dir_factor # dyi - dyi\n M1[2 * i + 1, 2 * ni] += -disp * k * Cos2 * ((-1) ** b[i]) * dir_factor # dyi - dxj\n M1[2 * i + 1, 2 * ni + 1] += -disp * k * CosSin * ((-1) ** b[i]) * dir_factor # dyi - dyj\n\n # if i==0:\n # print '\\n --- \\n added M1[2*i+1, 2*i] = ',disp*k*Cos2 *((-1)**b[i]) *dir_factor\n # print 'dir_factor = ', dir_factor\n # print 'k = ', k\n # print 'else =', ((-1)**b[i]) *dir_factor\n\n # pinning/gravitational matrix\n M2[2 * i, 2 * i + 1] = (1.) * disp * dir_factor * extra_factor\n M2[2 * i + 1, 2 * i] = -(1.) * disp * dir_factor * extra_factor\n\n # self.pin_array.append(2*pi*1*extra_factor)\n # Assumes:\n # (-1)**c adot = - spring* (-1)**b SUM{ z x nij*(nij.(dri-drj)) } + pin\n matrix = - (-spring * M1 + pin * M2)\n\n return matrix",
"def method1(self):\n cres=0. # Variable for storing Chern number.\n # The U matrices from Fukui's method; storage...\n Ux=np.zeros((self.kS.Nx+1,self.kS.Ny+1),dtype=complex)\n Uy=np.zeros((self.kS.Nx+1,self.kS.Ny+1),dtype=complex)\n \n # ... and calculation of U matrices\n for ix in range(self.kS.Nx+1):\n for iy in range(self.kS.Ny+1):\n mat1=self.alleigvecs[:,:,ix ,iy ]\n if ix<self.kS.Nx:\n mat2=self.alleigvecs[:,:,ix+1,iy ]\n else:\n mat2=self.alleigvecs[:,:,1 ,iy ]\n if iy<self.kS.Ny:\n mat3=self.alleigvecs[:,:,ix ,iy+1]\n else:\n mat3=self.alleigvecs[:,:,ix ,1 ]\n Ux[ix,iy]=np.linalg.det(np.dot(np.conj(mat1.T),mat2)[:self.NL,:self.NL])\n Uy[ix,iy]=np.linalg.det(np.dot(np.conj(mat1.T),mat3)[:self.NL,:self.NL])\n \n # Local estimates of Berry curvature; storage ...\n ftempall=np.zeros((self.kS.Nx,self.kS.Ny),complex)\n # ... and calculation\n for ix in range(self.kS.Nx):\n for iy in range(self.kS.Ny):\n ftemp=np.log(Ux[ix,iy]*Uy[ix+1,iy]/Ux[ix,iy+1]/Uy[ix,iy])\n ftempall[ix,iy]=ftemp # ... of local Berry curvature ...\n cres+=ftemp/2./pi/1j # ... and of Berry phase (Chern number).\n\n return cres.real, ftempall",
"def triangulate(Kl, Kr, Twl, Twr, pl, pr, Sl, Sr):\r\n #--- FILL ME IN ---\r\n \r\n # Compute baseline (right camera translation minus left camera translation)\r\n Cr = (Twr)[0:3,-1] #left camera translaton\r\n Cl = (Twl)[0:3,-1] #right camera translation\r\n b = (Cr - Cl).reshape(3,1)\r\n \r\n \r\n # Unit vectors projecting from optical center to image plane points.\r\n # Use variables rayl and rayr for the rays.\r\n rayl = Twl[0:3,0:3].dot(inv(Kl)).dot(np.insert(pl,2,1, axis =0))\r\n rayl = rayl/norm(rayl) #convert to unit vector\r\n \r\n rayr = Twr[0:3,0:3].dot(inv(Kr)).dot(np.insert(pr,2,1, axis =0))\r\n rayr = rayr/norm(rayr) #convert to unit vector\r\n \r\n \r\n # Projected segment lengths.\r\n # Use variables ml and mr for the segment lengths.\r\n rLrR = rayl.T.dot(rayr)[0][0]\r\n ml = ((b.T.dot(rayl) - (b.T.dot(rayr))*(rLrR))/(1-rLrR**2))[0][0]\r\n mr = (rLrR*ml - b.T.dot(rayr))[0][0]\r\n \r\n # Segment endpoints.\r\n # User variables Pl and Pr for the segment endpoints.\r\n Pl = Cl.reshape(3,1) + rayl*ml\r\n Pr = Cr.reshape(3,1) + rayr*mr\r\n \r\n # Now fill in with appropriate ray Jacobians. These are \r\n # 3x4 matrices, but two columns are zeros (because the right\r\n # ray direction is not affected by the left image point and \r\n # vice versa).\r\n drayl = np.zeros((3, 4)) # Jacobian left ray w.r.t. image points.\r\n drayr = np.zeros((3, 4)) # Jacobian right ray w.r.t. image points.\r\n \r\n # Add code here...\r\n #rayl = f(x)_l/g(x)_l = r/norm(r). Equation for unit vector provided in the assignment\r\n #drayl = d/dx[f(x)_l/g(x)_l] = ( d/dx[f(x)_l]*g(x)_l - f(x)_l*d/dx[g(x)_l] / [g(x)_l]^2 )\r\n #where x is the image plane points in the left camera ul (i.e pl[0][0]), vl (i.e pl[1][0]), \r\n #and right camera ur (i.e pr[0][0]), vr (i.e pr[1][0])\r\n \r\n #As per equation in the assignment. I.e column vector (c1*u, c2*v, c3)\r\n fxl = Twl[:3,:3].dot(inv(Kl)).dot(np.array([[pl[0][0]],[pl[1][0]],[1]]))\r\n \r\n #f(x)_l = column vector(c1*ul, c2*vl + c3). \r\n #Therefore f(x)_l w.r.t u = f(x)l_u = column vector (c1, 0, 0,)\r\n fxl_u = Twl[:3,:3].dot(inv(Kl)).dot(np.array([[1],[0],[0]]))\r\n #Therefore f(x)_l w.r.t v = f(x)l_v = column vector (0, c2, 0,)\r\n fxl_v = Twl[:3,:3].dot(inv(Kl)).dot(np.array([[0],[1],[0]]))\r\n \r\n #Same math applied as with f(x)_l shown above - only that it is with the right camera\r\n fxr = Twr[:3,:3].dot(inv(Kr)).dot(np.array([[pr[0][0]],[pr[1][0]],[1]]))\r\n fxr_u = Twr[:3,:3].dot(inv(Kr)).dot(np.array([[1],[0],[0]]))\r\n fxr_v = Twr[:3,:3].dot(inv(Kr)).dot(np.array([[0],[1],[0]]))\r\n \r\n #Recall from above that g(x)_l = norm(r)\r\n gxl = norm(fxl)\r\n #g(x)_l wrt to u is; u*c1^2/norm(r). Where u*c1^2 = fxl_u.T.dot(fxl)\r\n # and gxl = norm(r)\r\n gxl_u = fxl_u.T.dot(fxl)/gxl \r\n #g(x)_l wrt to v is; v*c2^2/norm(r). 
Where v*c2^2 = fxl_v.T.dot(fxl)\r\n # and gxl = norm(r) \r\n gxl_v = fxl_v.T.dot(fxl)/gxl\r\n \r\n # same as above except with the right camera\r\n gxr = norm(fxr)\r\n gxr_u = fxr_u.T.dot(fxr)/gxr\r\n gxr_v = fxr_v.T.dot(fxr)/gxr\r\n \r\n #Fill in Jacobian results with results from above \r\n drayl[:,0] = ((fxl_u.dot(gxl) - fxl.dot(gxl_u))/(gxl*gxl)).reshape(3,)\r\n drayl[:,1] = ((fxl_v.dot(gxl) - fxl.dot(gxl_v))/(gxl*gxl)).reshape(3,) \r\n drayr[:,2] = ((fxr_u.dot(gxr) - fxr.dot(gxr_u))/(gxr*gxr)).reshape(3,)\r\n drayr[:,3] = ((fxr_v.dot(gxr) - fxr.dot(gxr_v))/(gxr*gxr)).reshape(3,)\r\n \r\n \r\n \r\n #------------------\r\n \r\n # Compute dml and dmr (partials wrt segment lengths).\r\n # Compute dml and dmr (partials wrt segment lengths).\r\n u = np.dot(b.T, rayl) - np.dot(b.T, rayr)*np.dot(rayl.T, rayr)\r\n v = 1 - np.dot(rayl.T, rayr)**2\r\n\r\n du = (b.T@drayl).reshape(1, 4) - \\\r\n (b.T@drayr).reshape(1, 4)*np.dot(rayl.T, rayr) - \\\r\n np.dot(b.T, rayr)*((rayr.T@drayl) + (rayl.T@drayr)).reshape(1, 4)\r\n \r\n dv = -2*np.dot(rayl.T, rayr)*((rayr.T@drayl).reshape(1, 4) + \\\r\n (rayl.T@drayr).reshape(1, 4))\r\n\r\n m = np.dot(b.T, rayr) - np.dot(b.T, rayl)@np.dot(rayl.T, rayr)\r\n n = np.dot(rayl.T, rayr)**2 - 1\r\n\r\n dm = (b.T@drayr).reshape(1, 4) - \\\r\n (b.T@drayl).reshape(1, 4)*np.dot(rayl.T, rayr) - \\\r\n np.dot(b.T, rayl)@((rayr.T@drayl) + (rayl.T@drayr)).reshape(1, 4)\r\n dn = -dv\r\n\r\n dml = (du*v - u*dv)/v**2\r\n dmr = (dm*n - m*dn)/n**2\r\n\r\n # Finally, compute Jacobian for P w.r.t. image points.\r\n JP = (ml*drayl + rayl*dml + mr*drayr + rayr*dmr)/2\r\n \r\n #--- FILL ME IN ---\r\n \r\n # 3D point.\r\n P = (Pl + Pr)/2\r\n \r\n # 3x3 landmark point covariance matrix (need to form\r\n # the 4x4 image plane covariance matrix first).\r\n M = np.zeros((4,4))\r\n M[0:2,0:2] = Sl\r\n M[2:4,2:4] = Sr\r\n \r\n S = JP.dot(M).dot(JP.T) #as per equation in the assignment\r\n\r\n # Check for correct outputs...\r\n correct = isinstance(Pl, np.ndarray) and Pl.shape == (3, 1) and \\\r\n isinstance(Pr, np.ndarray) and Pr.shape == (3, 1) and \\\r\n isinstance(P, np.ndarray) and P.shape == (3, 1) and \\\r\n isinstance(S, np.ndarray) and S.shape == (3, 3)\r\n\r\n if not correct:\r\n raise TypeError(\"Wrong type or size returned!\")\r\n\r\n return Pl, Pr, P, S",
"def contract_tenors(self):\n\n\tself.r_outer_r[:,:,0,1,:] = self.r_outer_r[:,:,0,1,:]/(1. - self.k_dot_r[0,1,:])\n\tself.r_outer_r[:,:,0,2,:] = self.r_outer_r[:,:,0,2,:]/(1. - self.k_dot_r[0,2,:])\n\t\n\tself.r_outer_r[:,:,1,0,:] = self.r_outer_r[:,:,1,0,:]/(1. - self.k_dot_r[1,0,:])\n\tself.r_outer_r[:,:,1,2,:] = self.r_outer_r[:,:,1,2,:]/(1. - self.k_dot_r[1,2,:])\n\t\n\tself.r_outer_r[:,:,2,0,:] = self.r_outer_r[:,:,2,0,:]/(1. - self.k_dot_r[2,0,:])\n\tself.r_outer_r[:,:,2,1,:] = self.r_outer_r[:,:,2,1,:]/(1. - self.k_dot_r[2,1,:])\n\n\tself.delta_l = np.zeros((3,3,self.N),dtype=np.complex_)\n \n\tself.delta_l[0,1,:] = get_l(self,0,1)\n\tself.delta_l[1,0,:] = get_l(self,1,0)\n\t\n\tself.delta_l[0,2,:] = get_l(self,0,2)\n\tself.delta_l[2,0,:] = get_l(self,2,0)\n\t\n\tself.delta_l[1,2,:] = get_l(self,1,2)\n\tself.delta_l[2,1,:] = get_l(self,2,1)\n \n\treturn",
"def __init__(self, start_t: float, end_t: float, num_time_blocks: int):\n self._num_time_blocks: int = num_time_blocks\n self._num_states: Optional[int] = None\n self._nlps: Dict[int, InteriorPointInterface] = dict() # keys are the time block index (passed into the build_model_for_time_block method\n self._link_forward_matrices: Dict[int, coo_matrix] = dict() # these get multiplied by the primal vars of the corresponding time block\n self._link_backward_matrices: Dict[int, coo_matrix] = dict() # these get multiplied by the primal vars of the corresponding time block\n self._link_forward_coupling_matrices: Dict[int, coo_matrix] = dict() # these get multiplied by the coupling variables\n self._link_backward_coupling_matrices: Dict[int, coo_matrix] = dict() # these get multiplied by the coupling variables\n\n self._primals_lb: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._primals_ub: BlockVector = BlockVector(self._num_time_blocks + 1)\n\n self._ineq_lb: BlockVector = BlockVector(self._num_time_blocks)\n self._ineq_ub: BlockVector = BlockVector(self._num_time_blocks)\n\n self._init_primals: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._primals: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._delta_primals: BlockVector = BlockVector(self._num_time_blocks + 1)\n\n self._init_slacks: BlockVector = BlockVector(self._num_time_blocks)\n self._slacks: BlockVector = BlockVector(self._num_time_blocks)\n self._delta_slacks: BlockVector = BlockVector(self._num_time_blocks)\n\n self._init_duals_eq: BlockVector = BlockVector(self._num_time_blocks)\n self._duals_eq: BlockVector = BlockVector(self._num_time_blocks)\n self._delta_duals_eq: BlockVector = BlockVector(self._num_time_blocks)\n\n self._init_duals_ineq: BlockVector = BlockVector(self._num_time_blocks)\n self._duals_ineq: BlockVector = BlockVector(self._num_time_blocks)\n self._delta_duals_ineq: BlockVector = BlockVector(self._num_time_blocks)\n\n self._init_duals_primals_lb: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._duals_primals_lb: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._delta_duals_primals_lb: BlockVector = BlockVector(self._num_time_blocks + 1)\n\n self._init_duals_primals_ub: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._duals_primals_ub: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._delta_duals_primals_ub: BlockVector = BlockVector(self._num_time_blocks + 1)\n\n self._init_duals_slacks_lb: BlockVector = BlockVector(self._num_time_blocks)\n self._duals_slacks_lb: BlockVector = BlockVector(self._num_time_blocks)\n self._delta_duals_slacks_lb: BlockVector = BlockVector(self._num_time_blocks)\n\n self._init_duals_slacks_ub: BlockVector = BlockVector(self._num_time_blocks)\n self._duals_slacks_ub: BlockVector = BlockVector(self._num_time_blocks)\n self._delta_duals_slacks_ub: BlockVector = BlockVector(self._num_time_blocks)\n\n self._eq_resid: BlockVector = BlockVector(self._num_time_blocks)\n self._ineq_resid: BlockVector = BlockVector(self._num_time_blocks)\n self._grad_objective: BlockVector = BlockVector(self._num_time_blocks + 1)\n self._jac_eq: BlockMatrix = BlockMatrix(nbrows=self._num_time_blocks, nbcols=self._num_time_blocks + 1)\n self._jac_ineq: BlockMatrix = BlockMatrix(nbrows=self._num_time_blocks, nbcols=self._num_time_blocks + 1)\n self._kkt: BlockMatrix = BlockMatrix(nbrows=num_time_blocks + 1, nbcols=num_time_blocks + 1)\n self._rhs: BlockVector = BlockVector(nblocks=num_time_blocks + 1)\n\n 
self._setup(start_t=start_t, end_t=end_t)\n self._setup_block_vectors()\n self._setup_jacs()\n self._setup_kkt_and_rhs_structure()",
"def model_prem(r):\n\n\t#- normalised radius\n\tx = r / 6371000.0\n\n\t#- march through the various depth levels -----------------------------------------------------\n\n\t#- upper crust\n\tif (r >= 6356000.0):\n\t\trho = 2.6\n\t\tvpv = 5.8\n\t\tvph = vpv\n\t\tvsv = 3.2\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- lower crust\n\telif (r >= 6346000.6) & (r < 6356000.0):\n\t\trho = 2.9\n\t\tvpv = 6.8\n\t\tvph = vpv\n\t\tvsv = 3.9\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- LID\n\telif (r >= 6291000.0) & (r < 6346000.6):\n\t\trho = 2.6910 + 0.6924 * x\n\t\tvpv = 0.8317 + 7.2180 * x\n\t\tvph = 3.5908 + 4.6172 * x\n\t\tvsv = 5.8582 - 1.4678 * x\n\t\tvsh = -1.0839 + 5.7176 * x\n\t\teta = 3.3687 - 2.4778 * x\n\n\t#- LVZ\n\telif (r >= 6151000.0) & (r < 6291000.0):\n\t\trho = 2.6910 + 0.6924 * x\n\t\tvpv = 0.8317 + 7.2180 * x\n\t\tvph = 3.5908 + 4.6172 * x\n\t\tvsv = 5.8582 - 1.4678 * x\n\t\tvsh = -1.0839 + 5.7176 * x\n\t\teta = 3.3687 - 2.4778 * x\n\n\t#- Transition zone 1\n\telif (r >= 5971000.0) & (r < 6151000.0):\n\t\trho = 7.1089 - 3.8045 * x\n\t\tvpv = 20.3926 - 12.2569 * x\n\t\tvph = vpv\n\t\tvsv = 8.9496 - 4.4597 * x\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- Transition zone 2\n\telif (r >= 5771000.0) & (r < 5971000.0):\n\t\trho = 11.2494 - 8.0298 * x\n\t\tvpv = 39.7027 - 32.6166 * x\n\t\tvph = vpv\n\t\tvsv = 22.3512 - 18.5856 * x\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- Transition zone 3\n\telif (r >= 5701000.0) & (r < 5771000.0):\n\t\trho = 5.3197 - 1.4836 * x\n\t\tvpv = 19.0957 - 9.8672 * x\n\t\tvph = vpv\n\t\tvsv = 9.9839 - 4.9324 * x\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- Lower mantle 1\n\telif (r >= 5600000.0) & (r < 5701000.0):\n\t\trho = 7.9565 - 6.4761 * x + 5.5283 * x**2 - 3.0807 * x**3\n\t\tvpv = 29.2766 - 23.6027 * x + 5.5242 * x**2 - 2.5514 * x**3\n\t\tvph = vpv\n\t\tvsv = 22.3459 - 17.2473 * x - 2.0834 * x**2 + 0.9783 * x**3\n\t\tvsh = vsv\n\t\teta = 1.0 \n\n\t#- Lower mantle 2\n\telif (r >= 3630000.0) & (r < 5600000.0):\n\t\trho = 7.9565 - 6.4761 * x + 5.5283 * x**2 - 3.0807 * x**3\n\t\tvpv = 24.9520 - 40.4673 * x + 51.4832 * x**2 - 26.6419 * x**3\n\t\tvph = vpv\n\t\tvsv = 11.1671 - 13.7818 * x + 17.4575 * x**2 - 9.2777 * x**3\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- Lower mantle 3\n\telif (r >= 3480000.0) & (r < 3630000.0):\n\t\trho = 7.9565 - 6.4761 * x + 5.5283 * x**2 - 3.0807 * x**3\n\t\tvpv = 15.3891 - 5.3181 * x + 5.5242 * x**2 - 2.5514 * x**3\n\t\tvph = vpv\n\t\tvsv = 6.9254 + 1.4672 * x - 2.0834 * x**2 + 0.9783 * x**3\n\t\tvsh = vsv\n\t\teta = 1.0\n\n\t#- Outer core\n\telif (r >= 1221000.5) & (r < 3480000.0):\n\t\trho = 12.5815 - 1.2638 * x - 3.6426 * x**2 - 5.5281 * x**3\n\t\tvpv = 11.0487 - 4.0362 * x + 4.8023 * x**2 - 13.5732 * x**3\n\t\tvph = vpv\n\t\tvsv = 0.0\n\t\tvsh = 0.0\n\t\teta = 1.0\n\n\t#- Inner Core\n\telif (r >= 0.0) & (r < 1221000.5):\n\t\trho = 13.0885 - 8.8381 * x**2\n\t\tvpv = 11.2622 - 6.3640 * x**2\n\t\tvph = vpv\n\t\tvsv = 3.6678 - 4.4475 * x**2\n\t\tvsh = vsv\n\t\teta = 1.0 \n\n\t#- convert to elastic parameters --------------------------------------------------------------\n\n\trho = 1000.0 * rho\n\tvpv = 1000.0 * vpv\n\tvph = 1000.0 * vph\n\tvsv = 1000.0 * vsv\n\tvsh = 1000.0 * vsh\n\n\tA = rho * vph**2\n\tC = rho * vpv**2\n\tN = rho * vsh**2\n\tL = rho * vsv**2\n\tF = eta * (A - 2 * L)\n\n\treturn rho, A, C, F, L, N",
"def update_variables(self):\n self.dl21 = self.l21-self.l11; self.dl22 = self.l22-self.l12; self.dl23 = self.l23-self.l13;\n self.kappa1, self.phi1, self.seg_len1 = self.configuration_space(self.l11, self.l12, self.l13, self.d, self.n)\n self.kappa2, self.phi2, self.seg_len2 = self.configuration_space(self.dl21, self.dl22, self.dl23, self.d, self.n)\n # aquire transformation matrices and tips for segment 1 and 2\n self.T01_bishop = self.transformation_matrix_bishop(self.kappa1, self.phi1, self.seg_len1)\n self.T12_bishop = self.transformation_matrix_bishop(self.kappa2, self.phi2, self.seg_len2)\n self.T02_bishop = np.matmul(self.T01_bishop, self.T12_bishop)\n self.T01_frenet = self.transformation_matrix_frenet(self.kappa1, self.phi1, self.seg_len1)\n self.T12_frenet = self.transformation_matrix_frenet(self.kappa2, self.phi2, self.seg_len2)\n self.T02_frenet = np.matmul(self.T01_frenet, self.T12_frenet)\n self.tip_vec1 = np.matmul(self.T01_bishop, self.base)[0:3]\n self.tip_vec2 = np.matmul(self.T02_bishop, self.base)[0:3]\n # Frenet frames\n self.normal_vec_frenet1 = self.T01_frenet[0:3, 0]\n self.binormal_vec_frenet1 = self.T01_frenet[0:3, 1]\n self.tangent_vec_frenet1 = self.T01_frenet[0:3, 2]\n self.normal_vec_frenet2 = self.T02_frenet[0:3, 0]\n self.binormal_vec_frenet2 = self.T02_frenet[0:3, 1]\n self.tangent_vec_frenet2 = self.T02_frenet[0:3, 2]\n # Bishop frames\n self.normal_vec_bishop1 = self.T01_bishop[0:3, 0]\n self.binormal_vec_bishop1 = self.T01_bishop[0:3, 1]\n self.tangent_vec_bishop1 = self.T01_bishop[0:3, 2]\n self.normal_vec_bishop2 = self.T02_bishop[0:3, 0]\n self.binormal_vec_bishop2 = self.T02_bishop[0:3, 1]\n self.tangent_vec_bishop2 = self.T02_bishop[0:3, 2]",
"def __init__(self,\n num_class=2,\n layer_nums=(3, 5, 5),\n layer_strides=(2, 2, 2),\n num_filters=(128, 128, 256),\n upsample_strides=(1, 2, 4),\n num_upsample_filters=(256, 256, 256),\n num_input_features=128,\n num_anchor_per_loc=2,\n use_groupnorm=False,\n num_groups=32,\n box_code_size=7,\n num_direction_bins=2):\n super(RPN, self).__init__()\n self._num_anchor_per_loc = num_anchor_per_loc\n self._box_code_size=box_code_size\n self._num_class=num_class\n self._num_direction_bins=num_direction_bins\n assert len(layer_nums) == 3\n assert len(layer_strides) == len(layer_nums)\n assert len(num_filters) == len(layer_nums)\n assert len(upsample_strides) == len(layer_nums)\n assert len(num_upsample_filters) == len(layer_nums)\n upsample_strides=[int(i) for i in upsample_strides]\n\n factors = []\n for i in range(len(layer_nums)):\n assert int(np.prod(\n layer_strides[:i + 1])) % upsample_strides[i] == 0\n factors.append(\n np.prod(layer_strides[:i + 1]) // upsample_strides[i])\n assert all([x == factors[0] for x in factors])\n\n # note that when stride > 1, conv2d with same padding isn't\n # equal to pad-conv2d. we should use pad-conv2d.\n block2_input_filters = num_filters[0]\n if use_groupnorm:\n BatchNorm2d = change_default_args(\n num_groups=num_groups, eps=1e-3)(GroupNorm)\n else:\n BatchNorm2d = change_default_args(\n eps=1e-3, momentum=0.01)(nn.BatchNorm2d)\n\n self.block1 = Sequential(\n nn.ZeroPad2d(1),\n nn.Conv2d(num_input_features, num_filters[0], 3,\n stride=layer_strides[0],bias=False),\n BatchNorm2d(num_filters[0]),\n nn.ReLU(),)\n for i in range(layer_nums[0]):\n self.block1.add(\n nn.Conv2d(num_filters[0], num_filters[0], 3,padding=1,bias=False))\n self.block1.add(BatchNorm2d(num_filters[0]))\n self.block1.add(nn.ReLU())\n self.deconv1 = Sequential(\n nn.ConvTranspose2d(num_filters[0],num_upsample_filters[0],\n upsample_strides[0],stride=upsample_strides[0],bias=False),\n BatchNorm2d(num_upsample_filters[0]),\n nn.ReLU(),)\n self.block2 = Sequential(\n nn.ZeroPad2d(1),\n nn.Conv2d(block2_input_filters,num_filters[1],3,\n stride=layer_strides[1],bias=False),\n BatchNorm2d(num_filters[1]),\n nn.ReLU(),)\n for i in range(layer_nums[1]):\n self.block2.add(\n nn.Conv2d(num_filters[1], num_filters[1], 3, padding=1,bias=False))\n self.block2.add(BatchNorm2d(num_filters[1]))\n self.block2.add(nn.ReLU())\n self.deconv2 = Sequential(\n nn.ConvTranspose2d(num_filters[1],num_upsample_filters[1],\n upsample_strides[1],stride=upsample_strides[1],bias=False),\n BatchNorm2d(num_upsample_filters[1]),\n nn.ReLU(),)\n self.block3 = Sequential(\n nn.ZeroPad2d(1),\n nn.Conv2d(num_filters[1], num_filters[2], 3, stride=layer_strides[2],bias=False),\n BatchNorm2d(num_filters[2]),\n nn.ReLU(),)\n for i in range(layer_nums[2]):\n self.block3.add(nn.Conv2d(num_filters[2], num_filters[2], 3, padding=1,bias=False))\n self.block3.add(BatchNorm2d(num_filters[2]))\n self.block3.add(nn.ReLU())\n self.deconv3 = Sequential(\n nn.ConvTranspose2d(\n num_filters[2],num_upsample_filters[2],\n upsample_strides[2],stride=upsample_strides[2],bias=False),\n BatchNorm2d(num_upsample_filters[2]),\n nn.ReLU(),)\n\n num_cls = num_anchor_per_loc * num_class\n self.conv_cls = nn.Conv2d(sum(num_upsample_filters), num_cls, 1)\n self.conv_box = nn.Conv2d(sum(num_upsample_filters), num_anchor_per_loc * box_code_size, 1)\n self.conv_dir_cls = nn.Conv2d(sum(num_upsample_filters),num_anchor_per_loc * num_direction_bins, 1)",
"def set_up_orbit_correctors(ps_beg, delay, id_slice1, ds_slice, zplot, id_slices, U_core, lambdaref):\n SXSS = Chicane(3.2716, 0.362, 0.830399, delay[0])\n HXSS = Chicane(3.2, 0.3636, 0.5828, delay[1])\n\n OC2 = [CORR08, D1_SXSS, SXSS, D2_SXSS, QUAD09, CORR09]\n OC3 = [CORR15, D1_HXSS, HXSS, D2_HXSS, QUAD16, CORR16]\n\n ps_end1 = beam_transportation(ps_beg, U_core[0])\n\n # ps_end1 is a 4-by-N array. N is the number of macro-particles. It is the full\n # 4D phase space distribution at the end of the first undulator section.\n\n # The id of the slice on the axis in the second undulator section\n on_axis_id_U2 = int(id_slice1+delay[0]/ds_slice+ (8*110)*lambdaref/ds_slice) # The last part is slippage\n\n print(on_axis_id_U2)\n\n ps_end_slice1 = beam_property_along_s(ps_end1, id_slices)[0:4, :]\n ps_on_axis_2 = np.ravel(ps_end_slice1[:, on_axis_id_U2])\n\n # print(ps_on_axis_2)\n\n OC2_optimized = analyze_orbit_corrector(OC2[0], OC2[-1], OC2[1:-1], ps_on_axis_2)\n print(OC2_optimized)\n CORR08_new = Orbit_Corrector(OC2[0].length, OC2_optimized[0], OC2_optimized[2])\n CORR09_new = Orbit_Corrector(OC2[-1].length, OC2_optimized[1], OC2_optimized[3])\n\n # The whole U2 with optimized orbit correctors\n U2_new = [CORR08_new] + OC2[1:-1] + [CORR09_new] + U_core[1]\n ps_end2 = beam_transportation(ps_end1, U2_new)\n\n # ps_end2 is a 4-by-N array. N is the number of macro-particles. It is the full\n # 4D phase space distribution at the end of the second undulator section.\n\n # The id of the slice on the axis in the third undulator section\n on_axis_id_U3 = int(id_slice1+(delay[0]+delay[1])/ds_slice +(14*110*lambdaref)/ds_slice) # The last term is the slipage\n\n print(on_axis_id_U3)\n\n ps_end_slice2 = beam_property_along_s(ps_end2, id_slices)[0:4, :]\n ps_on_axis_3 = np.ravel(ps_end_slice2[ :, on_axis_id_U3])\n\n # print(ps_on_axis_3)\n\n OC3_optimized = analyze_orbit_corrector(OC3[0], OC3[-1], OC3[1:-1], ps_on_axis_3)\n print(OC3_optimized)\n CORR15_new = Orbit_Corrector(OC3[0].length, OC3_optimized[0], OC3_optimized[2])\n CORR16_new = Orbit_Corrector(OC3[-1].length, OC3_optimized[1], OC3_optimized[3])\n\n U3_new = [CORR15_new] + OC3[1:-1] + [CORR16_new] + U_core[2]\n\n Undulator_Beamline = U_core[0]+U2_new+U3_new\n\n return Undulator_Beamline",
"def forward(self, x): \n pal1_sources = list()\n pal2_sources = list()\n loc_pal1 = list()\n conf_pal1 = list()\n loc_pal2 = list()\n conf_pal2 = list()\n\n # apply vgg up to conv3_3 relu\n for k in range(16):\n x = self.vgg[k](x)\n\n of1 = x\n s = self.L2Normof1(of1)\n pal1_sources.append(s)\n \n # apply vgg up to conv4_3 relu\n for k in range(16, 23):\n x = self.vgg[k](x)\n\n of2 = x\n s = self.L2Normof2(of2)\n pal1_sources.append(s)\n\n # apply vgg up to conv5_3 relu\n for k in range(23, 30):\n x = self.vgg[k](x)\n of3 = x\n s = self.L2Normof3(of3)\n pal1_sources.append(s)\n\n # apply vgg up to fc7\n for k in range(30, len(self.vgg)):\n x = self.vgg[k](x)\n of4 = x\n pal1_sources.append(of4)\n \n # apply extra layers and cache source layer outputs\n for k in range(2):\n x = F.relu(self.extras[k](x), inplace=True)\n of5 = x\n pal1_sources.append(of5)\n for k in range(2, 4):\n x = F.relu(self.extras[k](x), inplace=True)\n of6 = x\n pal1_sources.append(of6)\n\n ## fpn module\n \"\"\"\n lfpn6 = self.fpn_topdown6(of6)\n lfpn5 = self._upsample_product(self.fpn_topdown5(of6), self.fpn_latlayer5(of5))\n lfpn4 = self._upsample_product(self.fpn_topdown4(of5), self.fpn_latlayer4(of4))\n lfpn3 = self._upsample_product(self.fpn_topdown3(of4), self.fpn_latlayer3(of3))\n lfpn2 = self._upsample_product(self.fpn_topdown2(of3), self.fpn_latlayer2(of2))\n lfpn1 = self._upsample_product(self.fpn_topdown1(of2), self.fpn_latlayer1(of1))\n\n\n ef1 = self.fpn_fem3_3(lfpn1)\n ef1 = self.L2Normef1(ef1)\n ef2 = self.fpn_fem4_3(lfpn2)\n ef2 = self.L2Normef2(ef2)\n ef3 = self.fpn_fem5_3(lfpn3)\n ef3 = self.L2Normef3(ef3)\n\n ef4 = self.fpn_fem7(lfpn4)\n ef5 = self.fpn_fem6_2(lfpn5)\n ef6 = self.fpn_fem7_2(lfpn6)\n \"\"\"\n\n conv7 = F.relu(self.fpn_topdown[0](of6), inplace=True)\n x = F.relu(self.fpn_topdown[1](conv7), inplace=True)\n conv6 = F.relu(self._upsample_product(x, self.fpn_latlayer[0](of5)), inplace=True)\n\n x = F.relu(self.fpn_topdown[2](conv6), inplace=True)\n convfc7_2 = F.relu(self._upsample_product(x, self.fpn_latlayer[1](of4)), inplace=True)\n\n x = F.relu(self.fpn_topdown[3](convfc7_2), inplace=True)\n conv5 = F.relu(self._upsample_product(x, self.fpn_latlayer[2](of3)), inplace=True)\n\n x = F.relu(self.fpn_topdown[4](conv5), inplace=True)\n conv4 = F.relu(self._upsample_product(x, self.fpn_latlayer[3](of2)), inplace=True)\n\n x = F.relu(self.fpn_topdown[5](conv4), inplace=True)\n conv3 = F.relu(self._upsample_product(x, self.fpn_latlayer[4](of1)), inplace=True)\n\n ef1 = self.fpn_fem[0](conv3)\n ef1 = self.L2Normef1(ef1)\n ef2 = self.fpn_fem[1](conv4)\n ef2 = self.L2Normef2(ef2)\n ef3 = self.fpn_fem[2](conv5)\n ef3 = self.L2Normef3(ef3)\n ef4 = self.fpn_fem[3](convfc7_2)\n ef5 = self.fpn_fem[4](conv6)\n ef6 = self.fpn_fem[5](conv7)\n\n pal2_sources = (ef1, ef2, ef3, ef4, ef5, ef6)\n\n ## first shot \n for (x, l, c) in zip(pal1_sources, self.loc_pal1, self.conf_pal1):\n loc_pal1.append(l(x).permute(0, 2, 3, 1).contiguous())\n conf_pal1.append(c(x).permute(0, 2, 3, 1).contiguous())\n \n ## second shot\n for (x, l, c) in zip(pal2_sources, self.loc_pal2, self.conf_pal2):\n loc_pal2.append(l(x).permute(0, 2, 3, 1).contiguous())\n conf_pal2.append(c(x).permute(0, 2, 3, 1).contiguous())\n\n # first shot\n loc_pal1 = torch.cat([o.view(o.size(0), -1) for o in loc_pal1], 1)\n conf_pal1 = torch.cat([o.view(o.size(0), -1) for o in conf_pal1], 1)\n \n # second shot\n loc_pal2 = torch.cat([o.view(o.size(0), -1) for o in loc_pal2], 1)\n conf_pal2 = torch.cat([o.view(o.size(0), -1) for o in conf_pal2], 1)\n\n 
if self.phase == 'test':\n # At test time, only the shot2 outputs are used\n output = self.detect(\n loc_pal2.view(loc_pal2.size(0), -1, 4),\n self.softmax(conf_pal2.view(conf_pal2.size(0), -1,\n self.num_classes)), # conf preds\n )\n else:\n ## At training time, the outputs of both shot1 and shot2 are used\n output = (\n loc_pal1.view(loc_pal1.size(0), -1, 4),\n conf_pal1.view(conf_pal1.size(0), -1, self.num_classes),\n loc_pal2.view(loc_pal2.size(0), -1, 4),\n conf_pal2.view(conf_pal2.size(0), -1, self.num_classes))\n return output",
"def postprocessing(net, initial_marking, final_marking, A, B, pairs, loop_one_list):\n label_transition_dict = {}\n for label in loop_one_list:\n label_transition_dict[label] = PetriNet.Transition(label, label)\n net.transitions.add(label_transition_dict[label])\n\n # F L1L\n # Key is specific loop element\n for key, value in A.items():\n if key in B:\n A_without_B = value - B[key]\n B_without_A = B[key] - value\n pair = (A_without_B, B_without_A)\n for pair_try in pairs:\n in_part = pair_try[0]\n out_part = pair_try[1]\n if pair[0].issubset(in_part) and pair[1].issubset(out_part):\n pair_try_place = PetriNet.Place(str(pair_try))\n add_arc_from_to(label_transition_dict[key], pair_try_place, net)\n add_arc_from_to(pair_try_place, label_transition_dict[key], net)\n return net, initial_marking, final_marking",
"def forward(self, x):\n # l1\n #print(\"INIT SIZE\", torch.cuda.max_memory_allocated())\n #print(\"L1\")\n #print(\"input\", x.shape)\n e1 = self.ec_init(x)\n #print(\"init\", e1.shape)\n syn1 = self.ec11(e1) # init right - l1\n #print(\"syn1\", syn1.shape)\n #print(\"L2\")\n e2 = self.bilinear(syn1, 32, 32, size=self.sizes[2]) # l1-2\n #print(\"e2\", e2.shape)\n # l2\n syn2 = self.ec22(e2) # right l2 (concat later)\n #print(\"syn2\", syn2.shape)\n del e1, e2\n e3 = self.bilinear(syn2, 32, 32, size=self.sizes[3]) # l2-3\n #print(\"L3\")\n #print(\"e3\", e3.shape)\n # l3\n syn3 = self.ec33(e3) # right l3 (concat later)\n #print(\"syn3\", syn3.shape)\n del e3 # delete\n #print(\"L4\")\n e41 = self.bilinear(syn3, 32, 64, size=self.sizes[4]) # l3-l4\n #print(\"e41\", e41.shape)\n\n # l4\n e42 = self.ec441(e41) # right 1 l4\n #print(\"e42\", e42.shape) \n syn4 = self.ec442(e42) # right 2 l4 (concat later)\n #print(\"syn4\", syn4.shape)\n del e41, e42\n #print(\"L5\")\n e51 = self.bilinear(syn4, 64, 128, size=self.sizes[5]) # l4-l5\n #print(\"e51\", e51.shape)\n # l5\n e52 = self.ec551(e51) # right 1\n #print(\"e52\", e52.shape)\n syn5 = self.ec552(e52) # right 2\n #print(\"syn5\", syn5.shape)\n del e51, e52\n #print(\"L6\")\n e61 = self.bilinear(syn5, 128, 128, size=self.sizes[6]) # l5-l6\n #print(\"e61\", e61.shape)\n \n # l6\n e62 = self.ec661(e61) # right 1\n #print(\"e62\", e62.shape)\n syn6 = self.ec662(e62) # right 2\n #print(\"syn6\", syn6.shape)\n del e61, e62\n #print(\"L7\")\n e71 = self.bilinear(syn6, 128, 256, size=self.sizes[7]) #l6-7\n #print(\"e71\", e71.shape)\n \n # l7\n e72 = self.ec771(e71) # right 1 (green)\n #print(\"e72\", e72.shape)\n syn7 = self.ec772(e72) # right 2 (turq)\n #print(\"syn7\", syn7.shape)\n del e71, e72\n\n #print(\"L8\")\n\n #e_bottom_left = self.bilinear(syn7, 256, 4092, size=self.sizes[8]) # l7-l8\n e_bottom_left = self.bilinear(syn7, 256, 256, size=self.sizes[8]) # l7-l8\n #print(\"e_b_l\", e_bottom_left.shape)\n\n # l8 - the very bottom most encoded\n e_bottom_left = e_bottom_left.view(e_bottom_left.size(0), -1)\n batch_size = e_bottom_left.size()[0]\n e_bottom_right = self.ec88(e_bottom_left)\n # TODO - change the view so that 1st arg is batch size again\n e_bottom_right = e_bottom_right.view(batch_size, e_bottom_right.size(1), 1,1,1)\n #print(\"e_b_r\", e_bottom_right.shape)\n\n #print(\"SIZE BEFORE DEL\", torch.cuda.max_memory_allocated())\n if torch.cuda.is_available():\n torch.cuda.empty_cache()\n #print(\"SIZE AFTER DEL\", torch.cuda.max_memory_allocated())\n\n ## DECODE ##\n #print(\"TO CONCAT:\")\n #print(\"Shape1\", self.bilinear(e_bottom_right, 4096, 256, size=self.sizes2[7]).shape)\n #print(\"Shape1\", self.bilinear(e_bottom_right, 256, 256, size=self.sizes2[7]).shape)\n #print(\"syn7 \", syn7.shape)\n # QUESTION - check this is a simple cat - says \"copy and stack\"\n #d71 = torch.cat((self.bilinear(e_bottom_right, 4096, 256, size=self.sizes2[7]), syn7), dim=1) # concat on level 7\n d71 = torch.cat((self.bilinear(e_bottom_right, 256, 256, size=self.sizes2[7]), syn7), dim=1) # concat on level 7\n #print(\"d71 (post cat)\", d71.shape)\n del e_bottom_left, e_bottom_right\n d72 = self.dc77(d71) # move right on level 7 (decode)\n #print(\"d72 (decoded)\", d72.shape)\n del d71, syn7\n\n # TODO - finish\n d61 = torch.cat((self.bilinear(d72, 256, 128, size=self.sizes2[6]), syn6), dim=1)\n del d72, syn6\n d62 = self.dc66(d61)\n\n d51 = torch.cat((self.bilinear(d62, 128, 128, size=self.sizes2[5]), syn5), dim=1)\n del d61, d62, syn5\n d52 = 
self.dc55(d51)\n\n d41 = torch.cat((self.bilinear(d52, 128, 64, size=self.sizes2[4]), syn4), dim=1)\n del d51, d52, syn4\n d42 = self.dc44(d41)\n\n d31 = torch.cat((self.bilinear(d42, 64, 32, size=self.sizes2[3]), syn3), dim=1)\n del d41, d42, syn3\n d32 = self.dc33(d31)\n\n d21 = torch.cat((self.bilinear(d32, 32, 32, size=self.sizes2[2]), syn2), dim=1)\n del d31, d32, syn2\n d22 = self.dc22(d21)\n\n d11 = torch.cat((self.bilinear(d22, 32, 32, size=self.sizes2[1]), syn1), dim=1)\n del d21, d22, syn1\n d12 = self.dc11(d11)\n return d12\n \"\"\"\n del d11\n # QUESTION\n # is this right or is there only 1 rightward step at top layer\n d0 = self.dc10(d12)\n return d0\n \"\"\"",
"def forward(self, Ca, mask, residue_idx, chain_labels):\n if self.augment_eps > 0:\n Ca = Ca + self.augment_eps * torch.randn_like(Ca)\n\n D_neighbors, E_idx, mask_neighbors = self._dist(Ca, mask)\n\n Ca_0 = torch.zeros(Ca.shape, device=Ca.device)\n Ca_2 = torch.zeros(Ca.shape, device=Ca.device)\n Ca_0[:,1:,:] = Ca[:,:-1,:]\n Ca_1 = Ca\n Ca_2[:,:-1,:] = Ca[:,1:,:]\n\n V, O_features = self._orientations_coarse(Ca, E_idx)\n \n RBF_all = []\n RBF_all.append(self._rbf(D_neighbors)) #Ca_1-Ca_1\n RBF_all.append(self._get_rbf(Ca_0, Ca_0, E_idx)) \n RBF_all.append(self._get_rbf(Ca_2, Ca_2, E_idx))\n\n RBF_all.append(self._get_rbf(Ca_0, Ca_1, E_idx))\n RBF_all.append(self._get_rbf(Ca_0, Ca_2, E_idx))\n\n RBF_all.append(self._get_rbf(Ca_1, Ca_0, E_idx))\n RBF_all.append(self._get_rbf(Ca_1, Ca_2, E_idx))\n\n RBF_all.append(self._get_rbf(Ca_2, Ca_0, E_idx))\n RBF_all.append(self._get_rbf(Ca_2, Ca_1, E_idx))\n\n\n RBF_all = torch.cat(tuple(RBF_all), dim=-1)\n\n\n offset = residue_idx[:,:,None]-residue_idx[:,None,:]\n offset = gather_edges(offset[:,:,:,None], E_idx)[:,:,:,0] #[B, L, K]\n\n d_chains = ((chain_labels[:, :, None] - chain_labels[:,None,:])==0).long()\n E_chains = gather_edges(d_chains[:,:,:,None], E_idx)[:,:,:,0]\n E_positional = self.embeddings(offset.long(), E_chains)\n E = torch.cat((E_positional, RBF_all, O_features), -1)\n \n\n E = self.edge_embedding(E)\n E = self.norm_edges(E)\n \n return E, E_idx",
"def third_step(self, plan, dlvl_sag_img, rphase_sag_img, root_cor_sequence, cor_sequence, sag_sequence):\n \"\"\" Represents the column in the register matrix that represents the coronal sequence used.\n It's used to populate the registration matrix correctly \"\"\"\n column = self.cor_sequences.index(cor_sequence)\n # print('(Step 3) Column: {}\\n'.format(column))\n\n # Respiratory patterns linked to the sequence containing the root image\n lpatterns = [x for x in self.dataset if int(x.split('-')[1]) == cor_sequence]\n # print('(Step 3) Patterns coronal: {} ({})\\n'.format(lpatterns, len(lpatterns)))\n\n # Respiratory patterns linked to the sequence containing the root sagittal image\n pattern = [p for p in lpatterns if int(p.split('-')[3]) == sag_sequence][0]\n # print(\"(Step 3) Pattern: {}\\n\".format(pattern))\n # print(\"(Step 3) DL: {}\\n\".format(dlvl_sag_img))\n # print(\"(Step 3) DL[pos]: {}\\n\".format(dlvl_sag_img[column]))\n\n \"\"\" Get the diaphragmatic level of each image of the analyzed coronal sequence (parallel\n to the root coronal sequence) that crosses the sagittal image registered in the second\n step \"\"\"\n pts_pattern = self.pattern_coronal('{}.png'.format(pattern))\n diaph_lvl = [max(x) for x in self.diaphragmatic_level_coronal(pts_pattern)]\n resp_phase = self.respiratory_phase_coronal(self.diaphragmatic_level_coronal(pts_pattern))\n # print(\"(Step 3) DL sag: {}\\n\".format(dlvl_sag_img))\n # print(\"(Step 3) DL cor: {}\\n\".format(diaph_lvl))\n # print(\"(Step 3) Respiratory phase: {} ({})\\n\".format(resp_phase, len(resp_phase)))\n\n \"\"\" Check register condition:\n 1) If there is same diaphragmatic level \"\"\"\n index_imgs_registered = list() # Store index of the coronal registered images\n for index, i in enumerate(diaph_lvl):\n if i == dlvl_sag_img[column]:\n index_imgs_registered.append(index)\n # print(\"(Step 3) Index of registered images: {} ({})\\n\".format(index_imgs_registered, len(index_imgs_registered)))\n\n \"\"\" Check register condition:\n 2) If the instants are in the same respiratory phase \"\"\"\n for index, i in enumerate(resp_phase):\n if index in index_imgs_registered:\n if resp_phase[index] != rphase_sag_img[column]:\n index_imgs_registered.remove(index)\n # print(\"(Step 3) Index of registered images: {} ({})\\n\".format(index_imgs_registered, len(index_imgs_registered)))\n # c = raw_input(\"?\")\n\n # If there is no registered image\n if len(index_imgs_registered) == 0:\n # return -1, -1, -1\n\n index_imgs_registered =\\\n self.third_step_second_attempt(\n diaph_lvl=diaph_lvl,\n dlvl_sag_img=dlvl_sag_img,\n rphase_sag_img=rphase_sag_img,\n cor_sequence=cor_sequence,\n option=True)\n\n if len(index_imgs_registered) == 0:\n return -1, -1, -1\n\n # Get first sagittal image that was registered with root image\n if len(index_imgs_registered) > 0:\n imgnum = index_imgs_registered[0]\n # print(\"(Step 3) Imagem: {}\\n\".format(imgnum))\n # print(\"(Step 3) DL[pos]: {}\\n\".format(diaph_lvl[imgnum]))\n\n # Fills the matrices\n for i in range(self.matrows):\n pts_pattern = self.pattern_coronal('{}.png'.format(lpatterns[i]))\n diaph_lvl = [max(x) for x in self.diaphragmatic_level_coronal(pts_pattern)]\n # resp_phase =\\\n # self.respiratory_phase_sagittal(self.diaphragmatic_level_sagittal(pts_pattern))\n\n \"\"\" By analyzing a green points:\n - Red points: Appears if a temporal register is found \"\"\"\n if self.matRegistration[i, column] == 0.0 and len(index_imgs_registered) > 0:\n self.matDL[i, column] = diaph_lvl[imgnum]\n 
self.matRegistration[i, column] = self.yellow\n self.matRP[i, column] = resp_phase[i]\n\n # elif self.matRegistration[i, column] == 2.0 and len(index_imgs_registered) > 0:\n # self.matRegistration[i, column] = self.red\n\n # print(\"(Step 3) Diaphragmatic level matrix:\\n{}\\n\".format(self.matDL))\n # print(\"(Step 3) Registration matrix:\\n{}\\n\".format(self.matRegistration))\n # print(\"(Step 3) Respiratory phase:\\n{}\\n\".format(self.matRP))\n\n # print(\"(Step 3) Diaphragmatic level matrix:\\n{}\\n\".format(self.matDL))\n # print(\"(Step 3) Registration matrix:\\n{}\\n\".format(self.matRegistration))\n # print(\"(Step 3) Respiratory phase:\\n{}\\n\".format(self.matRP))\n\n imgnum = imgnum + 1\n dlvl = [self.matDL[i, column] for i in range(len(self.sag_sequences))]\n rphase = [int(self.matRP[i, column]) for i in range(len(self.sag_sequences))]\n # print(\"(Step 3) Registered image: {}\\n\".format(imgnum))\n # print(\"(Step 3) DL: {}\\n\".format(dlvl))\n\n return imgnum, dlvl, rphase",
"def to_revolute_chain(self):\n T_zero = {\"p0\": SE3.identity()}\n ang_lims_map = {}\n old_to_new_names = {\n \"p0\": \"p0\"\n } # Returned for user of the method (to map old joint names to new ones)\n ub, lb = spherical_angle_bounds_to_revolute(self.ub, self.lb)\n count = 1\n joint_prev = \"p0\"\n for (\n joint\n ) in self.d: # Assumes the dictionary is in chain order (perhaps enforce?)\n new_node1 = \"p\" + str(count)\n count += 1\n # ub[new_node1] = self.ub[joint]\n # lb[new_node1] = self.lb[joint]\n ang_lims_map[joint] = new_node1\n\n new_node2 = \"p\" + str(count)\n count += 1\n old_to_new_names[joint] = new_node2\n\n Ry = SE3(SO3(roty(np.pi / 2)), np.zeros(3))\n T_zero[new_node1] = T_zero[joint_prev].dot(Ry)\n d = self.d[joint]\n Ry_back = SE3(SO3(roty(-np.pi / 2)), np.zeros(3))\n T_zero[new_node2] = T_zero[new_node1].dot(Ry_back).dot(trans_axis(d, \"z\"))\n\n joint_prev = new_node2\n\n # for key in T_zero:\n # if key not in ub.keys() and key is not 'p0':\n # ub[key] = np.pi\n # lb[key] = -np.pi\n\n params = {\"T_zero\": T_zero, \"ub\": ub, \"lb\": lb}\n return RobotRevolute(params), old_to_new_names, ang_lims_map"
] | [
"0.5802384",
"0.5704981",
"0.553651",
"0.55297977",
"0.55000675",
"0.5490818",
"0.53914285",
"0.5377671",
"0.5346241",
"0.53404874",
"0.5332665",
"0.52918446",
"0.52571434",
"0.52550757",
"0.52523685",
"0.5244776",
"0.5243008",
"0.5238628",
"0.5237757",
"0.52282685",
"0.5177067",
"0.51749367",
"0.51686555",
"0.51682466",
"0.5164786",
"0.516114",
"0.51610285",
"0.5156842",
"0.5138377",
"0.5136782"
] | 0.7835292 | 0 |
Convert a column number into a column letter (3 -> 'C'). Repeatedly divide the column col_idx by 26 to find the column letters in reverse order. These numbers are 1-based, and can be converted to ASCII ordinals by adding 64. | def _get_column_letter(col_idx):
    # these indices correspond to A -> ZZZ and include all allowed
# columns
if not 1 <= col_idx <= 18278:
raise ValueError("Invalid column index {0}".format(col_idx))
letters = []
while col_idx > 0:
col_idx, remainder = divmod(col_idx, 26)
# check for exact division and borrow if needed
if remainder == 0:
remainder = 26
col_idx -= 1
letters.append(chr(remainder+64))
return ''.join(reversed(letters)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _index_to_column(i, column=''):\n\n # A dictionary of numbers to letters starting at 0, e.g.\n # {0: 'A', 1: 'B' ...}\n num_to_alpha = {k:v for k, v in enumerate(string.ascii_uppercase, 0)}\n # If our index is divisble by 26, we need to get recursive and add\n # additional letters.\n div = i // 26\n if div:\n column = index_to_column(div - 1, column)\n # Combine results in case things got all inception like.\n column = column + num_to_alpha[i % 26]\n\n return column",
"def get_column_letter_new(column_index):\n # these indicies correspond to A -> ZZZ and include all allowed\n # columns\n if not 1 <= column_index <= 18278:\n msg = 'Column index out of bounds: %s' % column_index\n raise ValueError(msg)\n letters = []\n while column_index > 0:\n column_index, remainder = divmod(column_index, 26)\n # check for exact division and borrow if needed\n if remainder == 0:\n remainder = 26\n column_index -= 1\n letters.append(chr(remainder+64))\n return ''.join(reversed(letters))",
"def num_to_col(n):\n\n # based on https://stackoverflow.com/a/23862195\n string = \"\"\n while n > 0:\n n, remainder = divmod(n - 1, 26)\n string = chr(65 + remainder) + string\n return string",
"def reverseCol(input):\n try:\n parsed = chr(input + ord('A'))\n except TypeError:\n raise PositionException, \"Bad input for col; %s\" % input\n if not 0 <= input < CHESS_COLS:\n raise PositionException, \"Col out of range; %d parsed as %s.\" \\\n % (input, parsed)\n return parsed",
"def convert_number_to_excel_colname(n):\n\n assert 0 < n <= 256\n\n alphabet = [chr(x) for x in xrange(65, 91)]\n\n if n > 26:\n return '{0}{1}'.format(alphabet[(n/26) - 1], alphabet[(n%26) - 1])\n else:\n return alphabet[(n%26) - 1]",
"def getColIdx(self, col):\n try: \n return int(col)\n except:\n return ord(col)-ord('a')",
"def convertColumn(cls, column, row = None):\n\n\t\t#Convert Column if needed\n\t\tif (isinstance(column, int)):\n\t\t\t#Check for past Z\n\t\t\tcount = 0\n\t\t\tbonusColumn = \"\"\n\t\t\twhile True:\n\t\t\t\tcount += 1\n\t\t\t\t#Does the ascii letter go past Z? If so, create addition letter\n\t\t\t\tif (openpyxl.utils.get_column_letter(count).isupper()):\n\t\t\t\t\tbreak\n\t\t\t\telse:\n\t\t\t\t\tcolumn -= 26\n\t\t\t\t\tbonusColumn = openpyxl.utils.get_column_letter(count)\n\n\t\t\t#Set new Column\n\t\t\tcolumn = bonusColumn + openpyxl.utils.get_column_letter(column)\n\n\t\tif (row is None):\n\t\t\treturn column\n\t\treturn f\"{column}{row}\"",
"def getColIdx(self, col):\n try:\n return int(col)\n except:\n return ord(col)-ord('a')",
"def column_to_letter(self, pos):\n column_dict = {}\n column_dict[0] = 'a'\n column_dict[1] = 'b'\n column_dict[2] = 'c'\n column_dict[3] = 'd'\n column_dict[4] = 'e'\n column_dict[5] = 'f'\n column_dict[6] = 'g'\n column_dict[7] = 'h'\n column_dict[8] = 'i'\n return column_dict[pos]",
"def getColName(self, col):\n try:\n return chr(ord('a') + col)\n except:\n return col",
"def getColName(self, col):\n try:\n return chr(ord('a') + col)\n except:\n return col",
"def letter_to_column(self, pos):\n column_dict = {}\n column_dict['a'] = 0\n column_dict['b'] = 1\n column_dict['c'] = 2\n column_dict['d'] = 3\n column_dict['e'] = 4\n column_dict['f'] = 5\n column_dict['g'] = 6\n column_dict['h'] = 7\n column_dict['i'] = 8\n return column_dict[pos[0]]",
"def letter_to_column(self, pos):\n column_dict = {}\n column_dict['a'] = 0\n column_dict['b'] = 1\n column_dict['c'] = 2\n column_dict['d'] = 3\n column_dict['e'] = 4\n column_dict['f'] = 5\n column_dict['g'] = 6\n column_dict['h'] = 7\n column_dict['i'] = 8\n return column_dict[pos[0]]",
"def letter_to_column(self, pos):\n column_dict = {}\n column_dict['a'] = 0\n column_dict['b'] = 1\n column_dict['c'] = 2\n column_dict['d'] = 3\n column_dict['e'] = 4\n column_dict['f'] = 5\n column_dict['g'] = 6\n column_dict['h'] = 7\n column_dict['i'] = 8\n return column_dict[pos[0]]",
"def canonicalize_column_index(self, line, col):\n if col < 0:\n col += self.col_lens[line] + 1\n assert col >= 0\n return col",
"def calculate_ascii(col, col_count, use_third, first_letter, third_letter):\n if col <= 26:\n # if it's under 26 columns, just use a single letter\n ascii_col = chr(col + 64)\n elif use_third:\n if col_count > 26:\n # first_letter describes the coordinate of what the first letter should be -\n # every 26 iterations, it increases by one to switch the first letter up by one\n first_letter += 1\n # col_count keeps track of what column you're at in the current first_letter iteration\n col_count = 1\n if first_letter > 90:\n third_letter += 1\n first_letter = 65\n ascii_col = chr(third_letter) + chr(first_letter) + chr((col_count + 64))\n\n col_count += 1\n else:\n # if it's over 26 columns, you have to calculate two different letters\n if col_count > 26:\n # first_letter describes the coordinate of what the first letter should be -\n # every 26 iterations, it increases by one to switch the first letter up by one\n first_letter += 1\n # col_count keeps track of what column you're at in the current first_letter iteration\n col_count = 1\n\n ascii_col = chr(first_letter) + chr((col_count + 64))\n\n if ascii_col == 'ZZ':\n use_third = True\n\n col_count += 1\n return ascii_col, col_count, use_third, first_letter, third_letter",
"def col_to_num(col_str):\n expn = 0\n col_num = 0\n for char in reversed(col_str):\n col_num += (ord(char) - ord('A') + 1) * (26 ** expn)\n expn += 1\n\n return col_num",
"def excel_style(col):\n result = []\n while col:\n col, rem = divmod(col-1, 26)\n result[:0] = LETTERS[rem]\n return ''.join(result)",
"def toindex(col, row):\n a2z = 'ABCDEFGHIJLKMNOPQRSTUVWXYZ'\n\n total = 0\n mult = 0\n for char in col:\n total += (a2z.find(char) + (26 * mult))\n mult += 1\n\n return total, row - 1",
"def find_index_column(sheet, name, num):\n for idx in range(1, 26):\n if sheet[chr(idx + 64) + str(num)].value == name:\n index_col = chr(64 + idx)\n break\n return index_col",
"def rotate_letter(c, num):\n return chr(((ord(c) - 97) + num) % 26 + 97)",
"def index_to_letter(idx):\r\n if 0 <= idx < 20:\r\n return chr(97 + idx)\r\n else:\r\n raise ValueError('A wrong idx value supplied.')",
"def indexToPosition(self, col, row):\n columns = \"ABCDEFGH\"\n return columns[col] + str(row + 1)",
"def excel_style(row, col):\n quot, rem = divmod(ord(col)-ord('A'), 26)\n return((chr(quot-1 + ord('A')) if quot else '') +\n (chr(rem + ord('A')) + str(row)))",
"def index_to_letter(index):\r\n return chr(index + CHAR_A)",
"def index_to_letter(index):\r\n return chr(index + CHAR_A)",
"def char_from_number(number):\r\n\r\n base = 26\r\n\r\n rval = \"\"\r\n\r\n if number == 0:\r\n rval = 'A'\r\n\r\n while number != 0:\r\n remainder = number % base\r\n new_char = chr(ord('A') + remainder)\r\n rval = new_char + rval\r\n number //= base\r\n\r\n return rval",
"def _index_to_char(self, index):\n return chr(index + ord('a'))",
"def letter_num(num: int):\n if abs(num) > 26 or num == 0:\n let = ord('a') + 26 - 1\n else:\n let = ord('a') + abs(num) - 1\n return chr(let)",
"def rot(c,n):\n if 'a' <= c <= 'z': \n new_ord = ord(c) + n\n if new_ord > ord('z'):\n new_ord = new_ord - 26\n elif 'A' <= c <= 'Z': \n new_ord = ord(c) + n\n if new_ord > ord('Z'):\n new_ord = new_ord - 26\n else: \n new_ord = ord(c)\n return chr(new_ord)"
] | [
"0.7620537",
"0.74437094",
"0.7439917",
"0.7273949",
"0.68757963",
"0.6614841",
"0.65894353",
"0.6583816",
"0.6494394",
"0.64713115",
"0.64713115",
"0.6470613",
"0.6470613",
"0.6470613",
"0.64606607",
"0.6388831",
"0.6317523",
"0.6244326",
"0.61555386",
"0.60580075",
"0.5990195",
"0.59671515",
"0.58490455",
"0.583574",
"0.5821506",
"0.5821506",
"0.57869524",
"0.57469344",
"0.570745",
"0.56874335"
] | 0.7847133 | 0 |
Given the head of a LinkedList, iteratively skip i nodes and then delete the next j nodes, repeating until the end of the list. | def skip_i_delete_j(head, i, j):
if i == 0:
return None
if head is None or j < 0 or i < 0:
return head
current = head
previous = None
while current:
# skip (i - 1) nodes
for _ in range(i - 1):
if current is None:
return head
            current = current.next
        # the list may end exactly at the skip boundary
        if current is None:
            return head
        previous = current
        current = current.next
# delete next j nodes
for _ in range(j):
if current is None:
break
            current = current.next
previous.next = current
return head | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete(self, index):\n if index == 0 and self.head is not None:\n self.head = self.head.next\n return\n\n current_index = 0\n current = self.head\n previous = None\n\n while current:\n if current_index == index:\n previous.next = current.next\n\n previous = current\n current = current.next\n current_index += 1",
"def deleteAtIndex(self, index):\n cur = self.head\n if cur == None:\n return\n elif index == 0:\n self.head = cur.next\n\n cur, i = self.head, 1\n while cur and i != index:\n cur = cur.next\n i += 1\n if cur.next == None:\n cur = None\n else:\n cur.next = cur.next.next",
"def remove_a_specific_item(self, index):\n\n current = self.head\n previous = None\n for i in range(index):\n previous = current\n current = current.next\n if previous is None: self.head = current.next\n else: previous.next = current.next\n self.size -= 1",
"def deleteAtIndex(self, index: int) -> None:\n if(index == 0):\n self.head = self.head.next\n else:\n prev = None \n cur = self.head \n cnt = 0 \n \n while cur != None:\n if(cnt == index):\n next_node = cur.next\n prev.next = next_node \n return\n else:\n prev = cur \n cur = cur.next\n cnt += 1",
"def delete_by_index(self, index):\n if index < 0 or index >= self.get_size():\n raise IndexError('Index out of bounds')\n if index == 0:\n self.head = self.head.next\n return\n i = 0\n temp = self.head\n while temp is not None:\n if i == index-1:\n temp.next = temp.next.next\n break\n temp = temp.next\n i += 1",
"def deleteAtIndex(self, index):\n\n if index < 0:\n return -1\n\n p = self.head\n while index and p: # 0-index before index-th\n p = p.next\n index -= 1\n\n if p == None or p.next == None:\n return\n if p.next.next:\n p.next.next.prev = p\n p.next = p.next.next\n if p.next == None:\n self.tail = p\n # self.printList()",
"def deleteAtIndex(self, index):\n if index >= 0 and index < self.length:\n prev = None\n curr = self.head\n _next = None\n if curr:\n _next = curr.next\n for i in range(1, index + 1):\n prev = curr\n curr = curr.next\n if curr:\n _next = curr.next\n if prev:\n prev.next = _next\n else:\n self.head = _next\n self.length -= 1",
"def deleteAtIndex(self, index: int) -> None:\n if self.head == None:\n return -1\n curr = self.head\n if index == 0:\n self.head = curr.next\n return\n if index < 0:\n return -1\n for i in range(index - 1):\n curr = curr.next\n if curr is None:\n break\n if curr is None:\n return -1\n if curr.next is None:\n return -1\n \n next = curr.next.next\n curr.next = None\n curr.next = next",
"def erase(self, index):\r\n if index >= self.length():\r\n print(\"ERROR\")\r\n return None\r\n current_index = 0\r\n current_node = self.head\r\n while True:\r\n last_node = current_node\r\n current_node = current_node.next\r\n if current_index == index:\r\n last_node.next = current_node.next\r\n return\r\n current_index += 1",
"def deleteAtIndex(self, index: int) -> None:\n if index < 0 or index >= self.size:\n return\n\n curr = self.head\n for _ in range(index):\n curr = curr.next\n curr.next = curr.next.next\n self.size -= 1",
"def delete_by_index(self, index):\n cur = self.head\n length=self.get_length()\n if type(index) is int:\n if self.is_empty():\n return\n else:\n if index > length:\n # The index value is out of range and prompts and exits\n print(\"Index is out of range.\")\n return\n else:\n if index == 0:\n if cur.next == None:\n self.head = None\n else:\n cur.next.prev = None\n self.head = cur.next\n return\n else:\n while (index) > 0:\n cur = cur.next\n index -= 1\n\n # Point the next node of cur to the next node of cur\n cur.prev.next = cur.next\n # Point the prev of the next node of cur to the previous node of cur\n cur.next.prev = cur.prev\n length -= 1\n return\n else:\n print(\"Index value is not int.\")\n return",
"def deleteAtIndex(self, index):\n cur = self.head\n prev = None\n# self.display(\"deleteAtIndex, deleting value at index \"+str(index))\n if not index:\n head = head.nxt\n if self.tail == cur:\n self.tail = None\n del cur\n return\n \n i = 0\n while i < index and cur:\n prev = cur\n cur = cur.nxt\n i+=1\n if prev:\n if cur:\n prev.nxt = cur.nxt\n if self.tail == cur:\n self.tail = prev\n del cur",
"def delete(self, index):\n # check validity of index:\n if index < 0 or index > self.n:\n print(\"Index Error; please input valid index\")\n return\n # if head element is to be removed,\n if index == 0:\n _ = self.pop_front()\n return\n # else,\n temp_node = self.head\n for _ in range(index-1):\n temp_node = temp_node.next # traverse the list\n index_node = temp_node.next\n # unlink\n temp_node.next = temp_node.next.next\n index_node = None\n self.n -= 1",
"def deleteAtIndex(self, index):\n if index < 0 or index >= self.size:\n return\n\n curr = self.head\n if index == 0:\n self.head = curr.next\n else:\n for i in range(index - 1):\n curr = curr.next\n curr.next = curr.next.next\n\n self.size -= 1",
"def deleteAtIndex(self, index: int) -> None:\n if index < 0 or index > self.cnt-1:\n return \n tmp = self.dummy\n for _ in range(index):\n tmp = tmp.next\n if index == self.cnt - 1:\n tmp.next = None\n else:\n tmp.next = tmp.next.next\n if tmp.next:\n tmp.next.pre = tmp\n self.cnt -= 1",
"def delete_list(self): \n temp_node = self.head\n while temp_node is not None:\n prev_node = temp_node\n temp_node = temp_node.next\n # prev_node.val += \": deleted\" # for sanity check\n # reset data\n prev_node.val = None\n prev_node.next = None",
"def deleteAtIndex(self, index):\n if index >= self.len:\n return\n p = self.head\n while index > 0:\n index -= 1\n p = p.next\n if p.next is self.tail:\n self.tail = p\n p.next = p.next.next\n self.len -= 1",
"def delete(self, ele):\n prev = current = self.head\n element_in_head = False\n if self.head:\n while True:\n\tif current.data == ele:\n\t if current == self.head:\n\t element_in_head = True\n\t else:\n\t prev.next = current.next\n\t break\n\tprev = current\n\tcurrent = current.next\n\tif current == self.head:\n\t break\n if element_in_head:\n\tif self.head.next == self.head:\n\t self.head = None\n\telse:\n\t prev.next = self.head.next\n\t self.head = self.head.next",
"def remove_nth_element(self, position):\n if not self.head or position > self.length() -1:\n raise LinkedListException\n if position == 0 and self.head == self.head.next:\n self.head = None\n else:\n current = self.head\n prev = self.head\n counter = 0\n while counter < position or position == 0:\n counter += 1\n prev = current\n current = current.next\n\tif current == self.head:\n\t break\n if position == 0:\n self.head = current.next\n prev.next = current.next",
"def delete_from_tail(self):\n\n current = self.head\n #get the node right before the tail\n while current != None:\n if current.next == self.tail:\n current.next = None\n return\n current = current.next",
"def delete_node_at_index(self, index):\n if index < 0 or index >= self.size:\n return\n\n curr = self.head\n if index == 0:\n self.head = curr.next\n else:\n for i in range(index - 1):\n curr = curr.next\n curr.next = curr.next.next\n\n self.size -= 1",
"def delete(self, data):\r\n current_node = self.head\r\n current_index = 0\r\n index = self.get_index(data)\r\n while current_node.next != None:\r\n last_node = current_node\r\n current_node = current_node.next\r\n if current_index == index:\r\n last_node.next = current_node.next\r\n return\r\n current_index += 1",
"def remove_index(self, index):\n current = self.head\n position = index\n if index > (self.size() - 1):\n return None\n elif index == 0:\n self.head = current.next_node\n else: \n while position >= 1:\n previous = current\n current = current.next_node\n position -= 1 \n previous.next_node = current.next_node\n\n return current",
"def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:\n dummy = ListNode(0)\n dummy.next = head\n first = dummy\n second = dummy\n\n for i in range(n + 1):\n first = first.next\n\n while first:\n first = first.next\n second = second.next\n\n second.next = second.next.next\n\n return dummy.next",
"def deleteAtIndex(self, index: int) -> None:\n # if the index is invalid, do nothing\n if index < 0 or index >= self.size:\n return\n \n # find predecessor and successor of the node to be deleted\n if index < self.size - index:\n pred = self.head\n for _ in range(index):\n pred = pred.next\n succ = pred.next.next\n else:\n succ = self.tail\n for _ in range(self.size - index - 1):\n succ = succ.prev\n pred = succ.prev.prev\n \n # delete pred.next \n self.size -= 1\n pred.next = succ\n succ.prev = pred",
"def deleteAtIndex(self, index: int) -> None:\n node = self.get_node(index)\n\n if node:\n #print(\"called inside node to delete is \" + str(node) )\n prev_node = node.prev\n next_node = node.next\n\n if prev_node:\n prev_node.next = next_node\n else:\n self.head = next_node\n if next_node:\n next_node.prev = prev_node\n\n\n\n\n self.node_count -= 1",
"def delete(self, value):\n current = self.head\n index = 1\n ''' delete first element '''\n if index == 1 and current.value == value:\n print (\"deleting first element\")\n current.next = current.next.next\n return\n \n ''' delete last element '''\n while not current.next.next and current.next.value == value:\n print (\"deleting last element\")\n current.next = None\n return\n \n ''' anywhere in between '''\n while current.next.next and current.next.value != value:\n current = current.next\n \n ''' delete the element '''\n print (\"deleting anywhere between element\")\n current.next = current.next.next\n return",
"def delI(current,i):\r\n j=1\r\n while(current.next):\r\n if j<i:\r\n j+=1\r\n current=current.next\r\n elif j==i:\r\n delNode=current.next\r\n nextNode=delNode.next\r\n current.next=nextNode\r\n print('del num is ',delNode)\r\n return nextNode\r\n return False",
"def remove(self, d):\n\n if self.head is not None:\n if self.head.data == d:\n self.head = self.head.next\n else:\n temp = self.head\n while temp.next is not None:\n if temp.next.data == d:\n temp.next = temp.next.next\n break\n else:\n temp = temp.next",
"def erase(self, index):\n if self.empty():\n return \"Linked List is empty\"\n size = self.size()\n if index > size - 1:\n return \"Size of the Linked List is less than the index\"\n\n idx = 0\n h = self.head\n previous = self.head\n while h.next is not None:\n if idx is index:\n if previous is h:\n data = h.data\n self.head = h.next\n return data\n else:\n data = h.data\n previous.next = h.next\n h = None\n return data\n idx += 1\n previous = h\n h = h.next\n\n # Pop the last element\n data = previous.data\n previous.next = None\n return data"
] | [
"0.7670848",
"0.7561153",
"0.73862875",
"0.7377517",
"0.73646194",
"0.73238605",
"0.7250336",
"0.72010875",
"0.71746737",
"0.71504664",
"0.713762",
"0.71322805",
"0.7128258",
"0.7071973",
"0.70681566",
"0.7032534",
"0.69234806",
"0.6903745",
"0.6830704",
"0.6819169",
"0.6818123",
"0.6764261",
"0.67539406",
"0.67496353",
"0.6737122",
"0.670159",
"0.6696203",
"0.6687579",
"0.6655226",
"0.6654771"
] | 0.81535465 | 0 |
Perform a context visibility test. Creates a (fake) image with the specified owner and is_public attributes, then creates a context with the given keyword arguments and expects exp_res as the result of an is_image_visible() call on the context. | def do_visible(self, exp_res, img_owner, img_public, **kwargs):
img = FakeImage(img_owner, img_public)
ctx = context.RequestContext(**kwargs)
self.assertEqual(ctx.is_image_visible(img), exp_res) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_sharable(self, exp_res, img_owner, membership=None, **kwargs):\n\n img = FakeImage(img_owner, True)\n ctx = context.RequestContext(**kwargs)\n\n sharable_args = {}\n if membership is not None:\n sharable_args['membership'] = membership\n\n self.assertEqual(ctx.is_image_sharable(img, **sharable_args), exp_res)",
"def create_image_from_visibility(vis, **kwargs) -> Image:\n assert isinstance(vis, Visibility) or isinstance(vis, BlockVisibility), \\\n \"vis is not a Visibility or a BlockVisibility: %r\" % (vis)\n \n log.debug(\"create_image_from_visibility: Parsing parameters to get definition of WCS\")\n \n imagecentre = get_parameter(kwargs, \"imagecentre\", vis.phasecentre)\n phasecentre = get_parameter(kwargs, \"phasecentre\", vis.phasecentre)\n \n # Spectral processing options\n ufrequency = numpy.unique(vis.frequency)\n vnchan = len(ufrequency)\n \n frequency = get_parameter(kwargs, \"frequency\", vis.frequency)\n inchan = get_parameter(kwargs, \"nchan\", vnchan)\n reffrequency = frequency[0] * units.Hz\n channel_bandwidth = get_parameter(kwargs, \"channel_bandwidth\", 0.99999999999 * vis.channel_bandwidth[0]) * units.Hz\n \n if (inchan == vnchan) and vnchan > 1:\n log.debug(\n \"create_image_from_visibility: Defining %d channel Image at %s, starting frequency %s, and bandwidth %s\"\n % (inchan, imagecentre, reffrequency, channel_bandwidth))\n elif (inchan == 1) and vnchan > 1:\n assert numpy.abs(channel_bandwidth.value) > 0.0, \"Channel width must be non-zero for mfs mode\"\n log.debug(\"create_image_from_visibility: Defining single channel MFS Image at %s, starting frequency %s, \"\n \"and bandwidth %s\"\n % (imagecentre, reffrequency, channel_bandwidth))\n elif inchan > 1 and vnchan > 1:\n assert numpy.abs(channel_bandwidth.value) > 0.0, \"Channel width must be non-zero for mfs mode\"\n log.debug(\"create_image_from_visibility: Defining multi-channel MFS Image at %s, starting frequency %s, \"\n \"and bandwidth %s\"\n % (imagecentre, reffrequency, channel_bandwidth))\n elif (inchan == 1) and (vnchan == 1):\n assert numpy.abs(channel_bandwidth.value) > 0.0, \"Channel width must be non-zero for mfs mode\"\n log.debug(\"create_image_from_visibility: Defining single channel Image at %s, starting frequency %s, \"\n \"and bandwidth %s\"\n % (imagecentre, reffrequency, channel_bandwidth))\n else:\n raise ValueError(\"create_image_from_visibility: unknown spectral mode \")\n \n # Image sampling options\n npixel = get_parameter(kwargs, \"npixel\", 512)\n uvmax = numpy.max((numpy.abs(vis.data['uvw'][:, 0:1])))\n if isinstance(vis, BlockVisibility):\n uvmax *= numpy.max(frequency) / constants.c.to('m s^-1').value\n log.debug(\"create_image_from_visibility: uvmax = %f wavelengths\" % uvmax)\n criticalcellsize = 1.0 / (uvmax * 2.0)\n log.debug(\"create_image_from_visibility: Critical cellsize = %f radians, %f degrees\" % (\n criticalcellsize, criticalcellsize * 180.0 / numpy.pi))\n cellsize = get_parameter(kwargs, \"cellsize\", 0.5 * criticalcellsize)\n log.debug(\"create_image_from_visibility: Cellsize = %g radians, %g degrees\" % (cellsize,\n cellsize * 180.0 / numpy.pi))\n override_cellsize = get_parameter(kwargs, \"override_cellsize\", True)\n if override_cellsize and cellsize > criticalcellsize:\n log.debug(\"create_image_from_visibility: Resetting cellsize %g radians to criticalcellsize %g radians\" % (\n cellsize, criticalcellsize))\n cellsize = criticalcellsize\n pol_frame = get_parameter(kwargs, \"polarisation_frame\", PolarisationFrame(\"stokesI\"))\n inpol = pol_frame.npol\n \n # Now we can define the WCS, which is a convenient place to hold the info above\n # Beware of python indexing order! 
wcs and the array have opposite ordering\n shape = [inchan, inpol, npixel, npixel]\n log.debug(\"create_image_from_visibility: image shape is %s\" % str(shape))\n w = wcs.WCS(naxis=4)\n # The negation in the longitude is needed by definition of RA, DEC\n w.wcs.cdelt = [-cellsize * 180.0 / numpy.pi, cellsize * 180.0 / numpy.pi, 1.0, channel_bandwidth.to(units.Hz).value]\n # The numpy definition of the phase centre of an FFT is n // 2 (0 - rel) so that's what we use for\n # the reference pixel. We have to use 0 rel everywhere.\n w.wcs.crpix = [npixel // 2 + 1, npixel // 2 + 1, 1.0, 1.0]\n w.wcs.ctype = [\"RA---SIN\", \"DEC--SIN\", 'STOKES', 'FREQ']\n w.wcs.crval = [phasecentre.ra.deg, phasecentre.dec.deg, 1.0, reffrequency.to(units.Hz).value]\n w.naxis = 4\n \n # TODO: Why is this check being done?\n # direction_centre = pixel_to_skycoord(npixel // 2 + 1, npixel // 2 + 1, wcs=w, origin=1)\n # assert direction_centre.separation(imagecentre).value < 1e-7, \\\n # \"Image phase centre [npixel//2, npixel//2] should be %s, actually is %s\" % \\\n # (str(imagecentre), str(direction_centre))\n \n w.wcs.radesys = get_parameter(kwargs, 'frame', 'ICRS')\n w.wcs.equinox = get_parameter(kwargs, 'equinox', 2000.0)\n \n return create_image_from_array(numpy.zeros(shape), wcs=w, polarisation_frame=pol_frame)",
"def test_auth_public_owned(self):\n self.do_visible(True, 'pattieblack', True, tenant='pattieblack')",
"def test_empty_private_owned(self):\n self.do_visible(True, 'pattieblack', False, is_admin=True)",
"def test_anon_public_owned(self):\n self.do_visible(True, 'pattieblack', True)",
"def test_auth_private_owned(self):\n self.do_visible(True, 'pattieblack', False, tenant='pattieblack')",
"def test_anon_private_owned(self):\n self.do_visible(False, 'pattieblack', False)",
"def test_empty_public_owned(self):\n self.do_visible(True, 'pattieblack', True, is_admin=True)",
"def test_anon_private(self):\n self.do_visible(True, None, False)",
"def test_anon_public(self):\n self.do_visible(True, None, True)",
"def test_empty_private(self):\n self.do_visible(True, None, False, is_admin=True)",
"def test_visibility(self, data, visible):\n layer = Points(data)\n assert layer.visible is True\n\n layer = Points(data, visible=visible)\n assert layer.visible is visible\n\n layer.visible = not visible\n assert layer.visible is not visible",
"def test_auth_public_unowned(self):\n self.do_visible(True, 'pattieblack', True, tenant='froggy')",
"def test_auth_private(self):\n self.do_visible(True, None, False, tenant='froggy')",
"def test_empty_public(self):\n self.do_visible(True, None, True, is_admin=True)",
"def test_auth_private_unowned(self):\n self.do_visible(False, 'pattieblack', False, tenant='froggy')",
"def test_auth_public(self):\n self.do_visible(True, None, True, tenant='froggy')",
"def glance_update_and_set_public(glance, image, image_info):\n image_properties = image_info['image_properties']\n try:\n logger.debug(\"glance image update: properties=%s\", image_properties)\n glance.images.update(image.id, **image_properties)\n logger.debug(\"glance image update: visibility=public\")\n glance.images.update(image.id, visibility='public')\n except Exception:\n logger.exception(\"Updating (-> public) Glance image '%s' [%s] failed\", image.name, image.id)\n return 1\n\n return 0",
"def builder_should_create_target_image(self, builder, target, image_id, template, parameters):",
"def test_aws_service_api_private_image_get(self):\n pass",
"def expose(self, cmd):\n\n expType = cmd.cmd.keywords[0].name\n if expType in ('bias', 'test'):\n expTime = 0.0\n else:\n expTime = cmd.cmd.keywords[\"expTime\"].values[0]\n\n filename, image = self._doExpose(cmd, expTime, expType)\n cmd.finish('exposureState=done')",
"def test_aws_service_api_private_images_get(self):\n pass",
"def test_public_user(self):\n set_permission(Permission.SHARE, self.user1, self.collection)\n\n data = {\"public\": \"view\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"none\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"edit\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"share\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"owner\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)",
"def targetWeldCtx(*args, exists: bool=True, image1: Union[AnyStr, bool]=\"\", image2:\n Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\", mergeToCenter:\n bool=True, q=True, query=True, e=True, edit=True, **kwargs)->Union[None,\n Any]:\n pass",
"def create_image_allowed(self, create_image_allowed):\n self._create_image_allowed = create_image_allowed",
"def test_filter_public_permissions(self):\n data = {\"public\": \"view\"}\n check_public_permissions(data)\n\n data = {\"public\": \"edit\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)\n\n data = {\"public\": \"share\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)\n\n data = {\"public\": \"owner\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)",
"def embed_condition_images(condition_image,\n scope,\n reuse=tf.AUTO_REUSE,\n fc_layers = None,\n use_spatial_softmax = True):\n if len(condition_image.shape) != 4:\n raise ValueError('Image has unexpected shape {}.'.format(\n condition_image.shape))\n with tf.variable_scope(scope, reuse=reuse, use_resource=True):\n image_embedding, _ = vision_layers.BuildImagesToFeaturesModel(\n condition_image, use_spatial_softmax=use_spatial_softmax)\n if fc_layers is not None:\n if len(image_embedding.shape) == 2:\n image_embedding = layers.stack(\n image_embedding,\n layers.fully_connected,\n fc_layers[:-1],\n activation_fn=tf.nn.relu,\n normalizer_fn=layers.layer_norm)\n image_embedding = layers.fully_connected(\n image_embedding, fc_layers[-1], activation_fn=None)\n else:\n image_embedding = layers.stack(\n image_embedding,\n layers.conv2d,\n fc_layers[:-1],\n kernel_size=[1, 1],\n activation_fn=tf.nn.relu,\n normalizer_fn=layers.layer_norm)\n image_embedding = layers.conv2d(\n image_embedding, fc_layers[-1], activation_fn=None)\n return image_embedding",
"def _doExpose(self, cmd, expTime, expType):\n \n image = self.actor.camera.expose(cmd, expTime, expType)\n filename = self.getNextFilename(cmd)\n pyfits.writeto(filename, image, checksum=False, clobber=True)\n cmd.inform(\"filename=%s\" % (qstr(filename)))\n \n return filename, image",
"def projectionContext(*args, exists: bool=True, history: bool=True, image1: Union[AnyStr,\n bool]=\"\", image2: Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\",\n name: AnyStr=\"\", q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass",
"def test_should_render_for_owner_unpublished(self) -> None:\n self.assertTrue(self.action.should_render(\n context=self._create_request_context(\n public=False)))"
] | [
"0.55633384",
"0.54331094",
"0.5389461",
"0.5342315",
"0.5338732",
"0.53361624",
"0.5331995",
"0.53111786",
"0.5263905",
"0.519634",
"0.5192311",
"0.5132145",
"0.51142937",
"0.5112023",
"0.50558454",
"0.49886537",
"0.49874082",
"0.49178144",
"0.47592",
"0.4705346",
"0.46915755",
"0.46870592",
"0.46700382",
"0.46570787",
"0.4648981",
"0.4646109",
"0.46335655",
"0.46271664",
"0.4621221",
"0.46063378"
] | 0.7743895 | 0 |
Perform a context sharability test. Creates a (fake) image with the specified owner and is_public attributes, then creates a context with the given keyword arguments and expects exp_res as the result of an is_image_sharable() call on the context. If membership is not None, its value will be passed in as the 'membership' keyword argument of is_image_sharable(). | def do_sharable(self, exp_res, img_owner, membership=None, **kwargs):
img = FakeImage(img_owner, True)
ctx = context.RequestContext(**kwargs)
sharable_args = {}
if membership is not None:
sharable_args['membership'] = membership
self.assertEqual(ctx.is_image_sharable(img, **sharable_args), exp_res) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_auth_sharable_can_share(self):\n self.do_sharable(True, 'pattieblack', FakeMembership(True),\n tenant='froggy')",
"def do_visible(self, exp_res, img_owner, img_public, **kwargs):\n\n img = FakeImage(img_owner, img_public)\n ctx = context.RequestContext(**kwargs)\n\n self.assertEqual(ctx.is_image_visible(img), exp_res)",
"def test_anon_shared(self):\n self.do_sharable(False, 'pattieblack', None)\n self.do_sharable(False, 'pattieblack', FakeMembership(True))",
"def test_auth_sharable_owned(self):\n self.do_sharable(True, 'pattieblack', None, tenant='pattieblack')",
"def test_auth_sharable_cannot_share(self):\n self.do_sharable(False, 'pattieblack', FakeMembership(False),\n tenant='froggy')",
"def test_empty_shared(self):\n self.do_sharable(False, 'pattieblack', None, is_admin=True)\n self.do_sharable(False, 'pattieblack', FakeMembership(True),\n is_admin=True)",
"async def shrug(self,ctx,user: discord.Member=None):\n if user == None or user.id == ctx.author.id:\n await ctx.send(\"{}\".format(ctx.author.mention))\n else:\n await ctx.send(\"{} {}\".format(ctx.author.mention, user.mention))\n img = random.choice(self.getreaction(\"shrug\", \"0\"))\n embed = discord.Embed(colour=ctx.guild.me.top_role.colour)\n embed.set_image(url=img)\n await ctx.send(embed=embed)",
"def test_auth_sharable(self):\n self.do_sharable(False, 'pattieblack', None, tenant='froggy')",
"def builder_should_create_target_image(self, builder, target, image_id, template, parameters):",
"def create_image_from_visibility(vis, **kwargs) -> Image:\n assert isinstance(vis, Visibility) or isinstance(vis, BlockVisibility), \\\n \"vis is not a Visibility or a BlockVisibility: %r\" % (vis)\n \n log.debug(\"create_image_from_visibility: Parsing parameters to get definition of WCS\")\n \n imagecentre = get_parameter(kwargs, \"imagecentre\", vis.phasecentre)\n phasecentre = get_parameter(kwargs, \"phasecentre\", vis.phasecentre)\n \n # Spectral processing options\n ufrequency = numpy.unique(vis.frequency)\n vnchan = len(ufrequency)\n \n frequency = get_parameter(kwargs, \"frequency\", vis.frequency)\n inchan = get_parameter(kwargs, \"nchan\", vnchan)\n reffrequency = frequency[0] * units.Hz\n channel_bandwidth = get_parameter(kwargs, \"channel_bandwidth\", 0.99999999999 * vis.channel_bandwidth[0]) * units.Hz\n \n if (inchan == vnchan) and vnchan > 1:\n log.debug(\n \"create_image_from_visibility: Defining %d channel Image at %s, starting frequency %s, and bandwidth %s\"\n % (inchan, imagecentre, reffrequency, channel_bandwidth))\n elif (inchan == 1) and vnchan > 1:\n assert numpy.abs(channel_bandwidth.value) > 0.0, \"Channel width must be non-zero for mfs mode\"\n log.debug(\"create_image_from_visibility: Defining single channel MFS Image at %s, starting frequency %s, \"\n \"and bandwidth %s\"\n % (imagecentre, reffrequency, channel_bandwidth))\n elif inchan > 1 and vnchan > 1:\n assert numpy.abs(channel_bandwidth.value) > 0.0, \"Channel width must be non-zero for mfs mode\"\n log.debug(\"create_image_from_visibility: Defining multi-channel MFS Image at %s, starting frequency %s, \"\n \"and bandwidth %s\"\n % (imagecentre, reffrequency, channel_bandwidth))\n elif (inchan == 1) and (vnchan == 1):\n assert numpy.abs(channel_bandwidth.value) > 0.0, \"Channel width must be non-zero for mfs mode\"\n log.debug(\"create_image_from_visibility: Defining single channel Image at %s, starting frequency %s, \"\n \"and bandwidth %s\"\n % (imagecentre, reffrequency, channel_bandwidth))\n else:\n raise ValueError(\"create_image_from_visibility: unknown spectral mode \")\n \n # Image sampling options\n npixel = get_parameter(kwargs, \"npixel\", 512)\n uvmax = numpy.max((numpy.abs(vis.data['uvw'][:, 0:1])))\n if isinstance(vis, BlockVisibility):\n uvmax *= numpy.max(frequency) / constants.c.to('m s^-1').value\n log.debug(\"create_image_from_visibility: uvmax = %f wavelengths\" % uvmax)\n criticalcellsize = 1.0 / (uvmax * 2.0)\n log.debug(\"create_image_from_visibility: Critical cellsize = %f radians, %f degrees\" % (\n criticalcellsize, criticalcellsize * 180.0 / numpy.pi))\n cellsize = get_parameter(kwargs, \"cellsize\", 0.5 * criticalcellsize)\n log.debug(\"create_image_from_visibility: Cellsize = %g radians, %g degrees\" % (cellsize,\n cellsize * 180.0 / numpy.pi))\n override_cellsize = get_parameter(kwargs, \"override_cellsize\", True)\n if override_cellsize and cellsize > criticalcellsize:\n log.debug(\"create_image_from_visibility: Resetting cellsize %g radians to criticalcellsize %g radians\" % (\n cellsize, criticalcellsize))\n cellsize = criticalcellsize\n pol_frame = get_parameter(kwargs, \"polarisation_frame\", PolarisationFrame(\"stokesI\"))\n inpol = pol_frame.npol\n \n # Now we can define the WCS, which is a convenient place to hold the info above\n # Beware of python indexing order! 
wcs and the array have opposite ordering\n shape = [inchan, inpol, npixel, npixel]\n log.debug(\"create_image_from_visibility: image shape is %s\" % str(shape))\n w = wcs.WCS(naxis=4)\n # The negation in the longitude is needed by definition of RA, DEC\n w.wcs.cdelt = [-cellsize * 180.0 / numpy.pi, cellsize * 180.0 / numpy.pi, 1.0, channel_bandwidth.to(units.Hz).value]\n # The numpy definition of the phase centre of an FFT is n // 2 (0 - rel) so that's what we use for\n # the reference pixel. We have to use 0 rel everywhere.\n w.wcs.crpix = [npixel // 2 + 1, npixel // 2 + 1, 1.0, 1.0]\n w.wcs.ctype = [\"RA---SIN\", \"DEC--SIN\", 'STOKES', 'FREQ']\n w.wcs.crval = [phasecentre.ra.deg, phasecentre.dec.deg, 1.0, reffrequency.to(units.Hz).value]\n w.naxis = 4\n \n # TODO: Why is this check being done?\n # direction_centre = pixel_to_skycoord(npixel // 2 + 1, npixel // 2 + 1, wcs=w, origin=1)\n # assert direction_centre.separation(imagecentre).value < 1e-7, \\\n # \"Image phase centre [npixel//2, npixel//2] should be %s, actually is %s\" % \\\n # (str(imagecentre), str(direction_centre))\n \n w.wcs.radesys = get_parameter(kwargs, 'frame', 'ICRS')\n w.wcs.equinox = get_parameter(kwargs, 'equinox', 2000.0)\n \n return create_image_from_array(numpy.zeros(shape), wcs=w, polarisation_frame=pol_frame)",
"async def thumbsup(self,ctx,user: discord.Member=None):\n if user == None or user.id == ctx.author.id:\n await ctx.send(\"{}\".format(ctx.author.mention))\n else:\n await ctx.send(\"{} {}\".format(ctx.author.mention, user.mention))\n img = random.choice(self.getreaction(\"thumbsup\", \"0\"))\n embed = discord.Embed(colour=ctx.guild.me.top_role.colour)\n embed.set_image(url=img)\n await ctx.send(embed=embed)",
"def maketestimage(self, *args, **kwargs):\n return _image.image_maketestimage(self, *args, **kwargs)",
"def export_prepared_image(self, **kwargs):\n owner = kwargs.pop(\"owner\", None)\n indent = kwargs.pop(\"indent\", 2)\n key = _Texture(**kwargs)\n image = key.image\n\n if key not in self._pending:\n self._report.msg(\"Stashing '{}' for conversion as '{}'\", image.name, key, indent=indent)\n self._pending[key] = [owner.key,]\n else:\n self._report.msg(\"Found another user of '{}'\", key, indent=indent)\n self._pending[key].append(owner.key)",
"def testImageHandling(self):\n \n pm = getToolByName(self.portal, 'portal_membership')\n #make sure the person's member portrait isn't defined\n self.failUnless(pm.getPersonalPortrait('abc123').__name__ in ['defaultUser.gif', 'defaultUser.png'])\n \n # Delete the (nonexistant) image, make sure the portrait stays undefined\n self.person.setImage('DELETE_IMAGE')\n self.failUnless(pm.getPersonalPortrait('abc123').__name__ in ['defaultUser.gif', 'defaultUser.png'])\n \n self.person.setImage(TEST_GIF, content_type=\"image/gif\")\n #self.failUnlessEqual(self.person.getImage().data, TEST_GIF)\n # Try to get a 10x10 version of the image\n imageOfSizeTag = self.person.getImageOfSize(10, 10)\n self.failUnlessEqual(imageOfSizeTag, '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"10\" width=\"10\" />')\n self.failUnlessEqual(pm.getPersonalPortrait('abc123').__name__, 'abc123')\n \n # Try to get a scaled-by-ratio image with a width of 100.\n scaledImageTag = self.person.getScaledImageByWidth(100)\n self.failUnlessEqual(scaledImageTag, '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"150\" width=\"100\" />')\n \n # Delete the image, make sure the portrait is deleted as well\n self.person.setImage('DELETE_IMAGE')\n self.failUnless(pm.getPersonalPortrait('abc123').__name__ in ['defaultUser.gif', 'defaultUser.png'])\n \n #self.person.setImage(TEST_JPEG, content_type=\"image/jpeg\")\n #self.failUnlessEqual(self.person.getImage().data, TEST_JPEG)\n \n self.person.setImage(TEST_TIFF, content_type=\"image/tiff\")\n #self.failUnlessEqual(self.person.getImage().data, TEST_TIFF)\n # Try to get a 10x10 version of the image\n imageOfSizeTag = self.person.getImageOfSize(10, 10)\n self.failUnlessEqual(imageOfSizeTag, '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"10\" width=\"10\" />')\n \n # Try to get a scaled-by-ratio image with a width of 100.\n # TIFF handling in Plone is broken (probably the fault of PIL), handle the problem nicely.\n scaledImageTag = self.person.getScaledImageByWidth(100)\n self.failUnless(scaledImageTag == '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"150\" width=\"100\" />' or scaledImageTag == '')",
"def test_api_thumbnail_retrieve_by_organization_student(self):\n organization_access = OrganizationAccessFactory(\n organization=self.some_organization,\n role=STUDENT,\n )\n\n self.assert_user_cannot_retrieve_thumbnail(\n organization_access.user, self.some_thumbnail\n )",
"async def cry(self,ctx,user: discord.Member=None):\n if user == None or user.id == ctx.author.id:\n await ctx.send(\"{}\".format(ctx.author.mention))\n else:\n await ctx.send(\"{} {}\".format(ctx.author.mention, user.mention))\n img = random.choice(self.getreaction(\"cry\", \"0\"))\n embed = discord.Embed(colour=ctx.guild.me.top_role.colour)\n embed.set_image(url=img)\n await ctx.send(embed=embed)",
"def create_image(image_url, owner, permission=\"PRIVATE\"):\n\n image = Image(image_url=image_url,\n owner=owner,\n permission=permission)\n \n db.session.add(image)\n db.session.commit()\n return image",
"def create_image_allowed(self, create_image_allowed):\n self._create_image_allowed = create_image_allowed",
"def test_public_user(self):\n set_permission(Permission.SHARE, self.user1, self.collection)\n\n data = {\"public\": \"view\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"none\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"edit\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"share\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"owner\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)",
"def test_auth_sharable_admin(self):\n self.do_sharable(True, 'pattieblack', None, tenant='froggy',\n is_admin=True)",
"def get_images_by_vulnerability(self, **kwargs):\n ...",
"def test_create_image(self):\n pass",
"async def smug(self,ctx,user: discord.Member=None):\n if user == None or user.id == ctx.author.id:\n await ctx.send(\"{}\".format(ctx.author.mention))\n else:\n await ctx.send(\"{} {}\".format(ctx.author.mention, user.mention))\n img = random.choice(self.getreaction(\"smug\", \"0\"))\n embed = discord.Embed(colour=ctx.guild.me.top_role.colour)\n embed.set_image(url=img)\n await ctx.send(embed=embed)",
"def targetWeldCtx(*args, exists: bool=True, image1: Union[AnyStr, bool]=\"\", image2:\n Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\", mergeToCenter:\n bool=True, q=True, query=True, e=True, edit=True, **kwargs)->Union[None,\n Any]:\n pass",
"def srtContext(*args, exists: bool=True, history: bool=True, image1: Union[AnyStr, bool]=\"\",\n image2: Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\", name: AnyStr=\"\",\n q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass",
"def prepared_image_file(create_filesystem=True):\n # Create a 10 MB image file and a key file of 2048 bytes.\n execute('dd', 'if=/dev/zero', 'of=%s' % IMAGE_FILE, 'bs=1M', 'count=10')\n execute('dd', 'if=/dev/urandom', 'of=%s' % KEY_FILE, 'bs=512', 'count=4')\n # Encrypt and unlock the image file.\n execute('cryptsetup', '--batch-mode', 'luksFormat', IMAGE_FILE, KEY_FILE, sudo=True)\n # Create a filesystem on the encrypted image file?\n if create_filesystem:\n with unlocked_device(CRYPTO_NAME):\n execute('mkfs.ext4', FILESYSTEM_DEVICE, sudo=True)\n yield\n os.unlink(IMAGE_FILE)\n os.unlink(KEY_FILE)",
"def test_compute_image_sharpness(self):\n yuv_full_scale = 1023.0\n chart_file = os.path.join(os.environ['CAMERA_ITS_TOP'], 'pymodules',\n 'its', 'test_images', 'ISO12233.png')\n chart = cv2.imread(chart_file, cv2.IMREAD_ANYDEPTH)\n white_level = numpy.amax(chart).astype(float)\n sharpness = {}\n for j in [2, 4, 8]:\n blur = cv2.blur(chart, (j, j))\n blur = blur[:, :, numpy.newaxis]\n sharpness[j] = (yuv_full_scale *\n its.image.compute_image_sharpness(blur /\n white_level))\n self.assertTrue(numpy.isclose(sharpness[2]/sharpness[4],\n numpy.sqrt(2), atol=0.1))\n self.assertTrue(numpy.isclose(sharpness[4]/sharpness[8],\n numpy.sqrt(2), atol=0.1))",
"def image_check(kwargs) -> bool:\n\n # Kwarg argument check\n return kwarg_check(\n kwargs=kwargs,\n options=[\n \"min_captured_at\",\n \"max_captured_at\",\n \"radius\",\n \"image_type\",\n \"organization_id\",\n \"fields\",\n ],\n callback=\"image_check\",\n )",
"def simple_test(self, img, img_meta, **kwargs):\n pass",
"def shazoo(tree_adj, nodes_status, edge_weight, hinge_lines, nodes_sign,\n gold_sign):\n from grid_stretch import ancestor_info\n order = list(gold_sign.keys())\n random.shuffle(order)\n allpred = {}\n node = order[0]\n nodes_sign[node] = gold_sign[node]\n nodes_status[node] = REVEALED # no need for full reveal call\n ancestors = ancestor_info(tree_adj, node)\n allpred[node] = -1\n for node in order[1:]:\n pred = predict_node_sign(tree_adj, node, nodes_status, nodes_sign,\n hinge_lines, edge_weight)\n allpred[node] = pred\n nodes_sign[node] = gold_sign[node]\n reveal_node(tree_adj, node, nodes_status, hinge_lines, ancestors)\n mistakes = sum((1 for n, p in allpred.items() if p != gold_sign[n]))\n print('mistakes: {}'.format(mistakes))"
] | [
"0.54896724",
"0.54389876",
"0.52578163",
"0.5243541",
"0.5127175",
"0.50320935",
"0.49899673",
"0.49097702",
"0.4775629",
"0.47158346",
"0.47047496",
"0.4655331",
"0.4621686",
"0.46000612",
"0.45601118",
"0.45558378",
"0.45160365",
"0.4514471",
"0.45083925",
"0.45038795",
"0.44405332",
"0.44366503",
"0.43388307",
"0.43358678",
"0.43294334",
"0.43245113",
"0.4307198",
"0.43041328",
"0.42939383",
"0.42830762"
] | 0.8215476 | 0 |
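
A side note on the create_image_from_visibility negative above: its image-sampling step is the usual Nyquist argument, just interleaved with logging. A minimal standalone sketch (the helper name and signature are illustrative assumptions mirroring the record, not any library API):

    import numpy

    def choose_cellsize(uvw, cellsize=None, override_cellsize=True):
        # uvw: baseline coordinates in wavelengths, shape (nvis, 3);
        # like the record, only the u column is inspected here.
        uvmax = numpy.max(numpy.abs(uvw[:, 0:1]))
        criticalcellsize = 1.0 / (uvmax * 2.0)   # Nyquist limit in radians
        if cellsize is None:
            cellsize = 0.5 * criticalcellsize    # record's default: oversample by 2
        if override_cellsize and cellsize > criticalcellsize:
            cellsize = criticalcellsize          # clamp to avoid aliasing
        return cellsize

For BlockVisibility inputs the record first converts uvmax from metres to wavelengths (multiplying by max(frequency)/c) before applying the same rule.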
Tests that an empty context (with is_admin set to True) can access an owned image with is_public set to True. | def test_empty_public_owned(self):
self.do_visible(True, 'pattieblack', True, is_admin=True) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_empty_private_owned(self):\n self.do_visible(True, 'pattieblack', False, is_admin=True)",
"def test_auth_public_unowned(self):\n self.do_visible(True, 'pattieblack', True, tenant='froggy')",
"def test_should_render_for_owner_unpublished(self) -> None:\n self.assertTrue(self.action.should_render(\n context=self._create_request_context(\n public=False)))",
"def test_auth_public_owned(self):\n self.do_visible(True, 'pattieblack', True, tenant='pattieblack')",
"def do_visible(self, exp_res, img_owner, img_public, **kwargs):\n\n img = FakeImage(img_owner, img_public)\n ctx = context.RequestContext(**kwargs)\n\n self.assertEqual(ctx.is_image_visible(img), exp_res)",
"def test_empty_public(self):\n self.do_visible(True, None, True, is_admin=True)",
"def test_anon_public_owned(self):\n self.do_visible(True, 'pattieblack', True)",
"def test_auth_private_unowned(self):\n self.do_visible(False, 'pattieblack', False, tenant='froggy')",
"def test_empty_private(self):\n self.do_visible(True, None, False, is_admin=True)",
"def test_logged_in_owner(self):\n self.make_logged_in_owner()\n\n # test show album\n self.perm_escalate_helper(self.albumcontrol, self.showalbumrequest, self.testalbum, self.testalbum.id,\n self.u, album.display_album, ALBUM_PRIVATE)\n\n # test photo view\n self.perm_escalate_helper_get_with_client(self.albumcontrol, self.testalbum, self.photo.id,\n \"photoid\", \"show_photo\", ALBUM_PRIVATE)\n\n # test individual photo view page\n self.perm_escalate_helper(self.albumcontrol, self.indivphotorequest, self.testalbum, self.photo.id,\n self.u, album.display_photo, ALBUM_PRIVATE)",
"def test_aws_service_api_private_image_get(self):\n pass",
"def test_aws_service_api_private_images_get(self):\n pass",
"def test_public_user(self):\n set_permission(Permission.SHARE, self.user1, self.collection)\n\n data = {\"public\": \"view\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"none\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"edit\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"share\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"owner\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)",
"def test_get_owner_image(self):\n\n # user1 is owner of image_id 1\n # user2 is owner of image ids (2,3)\n for image_id in range(1, 4):\n url = reverse(self.url_name_one, args=(image_id,))\n if image_id == 1:\n self.client.force_authenticate(self.user1)\n else:\n self.client.force_authenticate(self.user2)\n\n response = self.client.get(url, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n if image_id == 1:\n self.assertEqual(response.data[\"owner\"], \"user1\")\n else:\n self.assertEqual(response.data[\"owner\"], \"user2\")\n\n # user2 try to get image_id 1 which is owner user1\n url = reverse(self.url_name_one, args=(1,))\n response = self.client.get(url, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_auth_private_owned(self):\n self.do_visible(True, 'pattieblack', False, tenant='pattieblack')",
"def test_anon_private_owned(self):\n self.do_visible(False, 'pattieblack', False)",
"def test_aws_service_api_public_image_get(self):\n pass",
"def test_admin_public(self):\n with self.login(self.user_admin):\n self.assertTrue(current_user.is_authenticated)\n self.assertEqual(current_user, self.user_admin)\n\n rv = self.client.get('/')\n self.assertEqual(b'public', rv.data)",
"def test_permissions(self):\n taxonomy = self.get_taxonomy()\n return True if self.request.user == taxonomy.author else taxonomy.public",
"def test_aws_service_api_public_images_get(self):\n pass",
"def test_auth_public(self):\n self.do_visible(True, None, True, tenant='froggy')",
"def test_filter_public_permissions(self):\n data = {\"public\": \"view\"}\n check_public_permissions(data)\n\n data = {\"public\": \"edit\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)\n\n data = {\"public\": \"share\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)\n\n data = {\"public\": \"owner\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)",
"def test_not_logged_in(self):\n\n # test show album\n self.perm_escalate_helper(self.albumcontrol, self.showalbumrequest, self.testalbum, self.testalbum.id,\n AnonymousUser(), album.display_album, ALBUM_PUBLIC)\n\n # test photo view\n self.perm_escalate_helper_get_with_client(self.albumcontrol, self.testalbum, self.photo.id,\n \"photoid\", \"show_photo\", ALBUM_PUBLIC)\n\n # test individual photo view page\n self.perm_escalate_helper(self.albumcontrol, self.indivphotorequest, self.testalbum, self.photo.id,\n AnonymousUser(), album.display_photo, ALBUM_PUBLIC)",
"def test_auth_private(self):\n self.do_visible(True, None, False, tenant='froggy')",
"def test_filter_owner_permission(self):\n User = get_user_model()\n user1 = User.objects.create(username=\"test_user1\", email=\"[email protected]\")\n obj = DescriptorSchema.objects.create(contributor=user1)\n obj.set_permission(Permission.VIEW, user1)\n\n data_template = {\n \"users\": {user1.id: \"view\"},\n \"groups\": {1: \"edit\", 2: \"NONE\"},\n }\n\n check_owner_permission(data_template, False, obj)\n\n # Check that only owner can set owner permission.\n data = deepcopy(data_template)\n data[\"users\"][1] = \"owner\"\n with self.assertRaises(exceptions.PermissionDenied):\n check_owner_permission(data, False, obj)\n check_owner_permission(data, True, obj)\n\n # Check that only owner can rewoke owner permission.\n obj.set_permission(Permission.OWNER, user1)\n data = deepcopy(data_template)\n data[\"users\"][1] = \"edit\"\n with self.assertRaises(exceptions.PermissionDenied):\n check_owner_permission(data, False, obj)\n check_owner_permission(data, True, obj)\n\n # Check that group can not be owner.\n obj.set_permission(Permission.VIEW, user1)\n data = deepcopy(data_template)\n data[\"groups\"][1] = \"owner\"\n with self.assertRaises(exceptions.ParseError):\n check_owner_permission(data, False, obj)\n with self.assertRaises(exceptions.ParseError):\n check_owner_permission(data, True, obj)",
"def test_home_as_anonymous(self):\n response = self.client.get(\"/images/contents/\")\n self.assertEqual(response.status_code, 401)",
"def test_home_as_anonymous(self):\n response = self.client.get(\"/images/contents/\")\n self.assertEqual(response.status_code, 401)",
"def test_not_owner(self):\n creating_user = create_user()\n creating_user.save()\n festival = create_festival('test', creating_user)\n festival.save()\n\n concert = create_concert(festival, 'test')\n concert.save()\n\n login(self.client)\n\n client = create_client('test')\n client.delete_access = True\n client.save()\n\n response = self.client.post('/backend/u/conc/', {'client': 'test', 'id': concert.pk})\n self.assertEqual(response.status_code, 200)\n self.assertEqual('Permission not granted', response.content.decode('utf-8'))",
"def test_no_images_unauthorized(self):\n res = self.client.get(IMAGE_URL)\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)",
"def check_owner(data=None, **kw):\n if data and 'owner_id' in data and not data['owner_id'] == current_user.id:\n raise ProcessingException(description=\"No write privileges\",\n code=401)"
] | [
"0.68287414",
"0.68124455",
"0.6661694",
"0.65978193",
"0.6575297",
"0.6509962",
"0.6473352",
"0.64539516",
"0.6439792",
"0.63888246",
"0.63792306",
"0.6371669",
"0.6366299",
"0.6361131",
"0.63585526",
"0.62366015",
"0.6124907",
"0.6047954",
"0.6000606",
"0.5996451",
"0.5975125",
"0.5964795",
"0.5952911",
"0.59268355",
"0.5913268",
"0.59085023",
"0.59085023",
"0.5895989",
"0.5867335",
"0.5856853"
] | 0.7047812 | 0 |
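
Read together with negatives such as do_visible, test_auth_private and test_anon_private_owned, this record pins down the visibility rule under test. A minimal sketch consistent with those cases (attribute names are assumptions taken from the FakeImage/RequestContext doubles, not the real Glance implementation):

    class RequestContext:
        def __init__(self, tenant=None, is_admin=False, **kwargs):
            self.tenant = tenant
            self.is_admin = is_admin

        def is_image_visible(self, image):
            if self.is_admin:
                return True                    # admin contexts see every image
            if image.is_public:
                return True                    # public images are visible to anyone
            if image.owner is None:
                return True                    # unowned images are also visible
            return self.tenant == image.owner  # otherwise only the owning tenant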
Tests that an empty context (with is_admin set to True) can access an owned image with is_public set to False. | def test_empty_private_owned(self):
self.do_visible(True, 'pattieblack', False, is_admin=True) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_empty_public_owned(self):\n self.do_visible(True, 'pattieblack', True, is_admin=True)",
"def test_auth_public_unowned(self):\n self.do_visible(True, 'pattieblack', True, tenant='froggy')",
"def test_should_render_for_owner_unpublished(self) -> None:\n self.assertTrue(self.action.should_render(\n context=self._create_request_context(\n public=False)))",
"def test_auth_private_unowned(self):\n self.do_visible(False, 'pattieblack', False, tenant='froggy')",
"def test_empty_private(self):\n self.do_visible(True, None, False, is_admin=True)",
"def test_auth_public_owned(self):\n self.do_visible(True, 'pattieblack', True, tenant='pattieblack')",
"def do_visible(self, exp_res, img_owner, img_public, **kwargs):\n\n img = FakeImage(img_owner, img_public)\n ctx = context.RequestContext(**kwargs)\n\n self.assertEqual(ctx.is_image_visible(img), exp_res)",
"def test_empty_public(self):\n self.do_visible(True, None, True, is_admin=True)",
"def test_anon_public_owned(self):\n self.do_visible(True, 'pattieblack', True)",
"def test_auth_private_owned(self):\n self.do_visible(True, 'pattieblack', False, tenant='pattieblack')",
"def test_anon_private_owned(self):\n self.do_visible(False, 'pattieblack', False)",
"def test_logged_in_owner(self):\n self.make_logged_in_owner()\n\n # test show album\n self.perm_escalate_helper(self.albumcontrol, self.showalbumrequest, self.testalbum, self.testalbum.id,\n self.u, album.display_album, ALBUM_PRIVATE)\n\n # test photo view\n self.perm_escalate_helper_get_with_client(self.albumcontrol, self.testalbum, self.photo.id,\n \"photoid\", \"show_photo\", ALBUM_PRIVATE)\n\n # test individual photo view page\n self.perm_escalate_helper(self.albumcontrol, self.indivphotorequest, self.testalbum, self.photo.id,\n self.u, album.display_photo, ALBUM_PRIVATE)",
"def test_aws_service_api_private_image_get(self):\n pass",
"def test_aws_service_api_private_images_get(self):\n pass",
"def test_get_owner_image(self):\n\n # user1 is owner of image_id 1\n # user2 is owner of image ids (2,3)\n for image_id in range(1, 4):\n url = reverse(self.url_name_one, args=(image_id,))\n if image_id == 1:\n self.client.force_authenticate(self.user1)\n else:\n self.client.force_authenticate(self.user2)\n\n response = self.client.get(url, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n if image_id == 1:\n self.assertEqual(response.data[\"owner\"], \"user1\")\n else:\n self.assertEqual(response.data[\"owner\"], \"user2\")\n\n # user2 try to get image_id 1 which is owner user1\n url = reverse(self.url_name_one, args=(1,))\n response = self.client.get(url, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_public_user(self):\n set_permission(Permission.SHARE, self.user1, self.collection)\n\n data = {\"public\": \"view\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"none\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n\n data = {\"public\": \"edit\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"share\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n data = {\"public\": \"owner\"}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)",
"def test_not_logged_in(self):\n\n # test show album\n self.perm_escalate_helper(self.albumcontrol, self.showalbumrequest, self.testalbum, self.testalbum.id,\n AnonymousUser(), album.display_album, ALBUM_PUBLIC)\n\n # test photo view\n self.perm_escalate_helper_get_with_client(self.albumcontrol, self.testalbum, self.photo.id,\n \"photoid\", \"show_photo\", ALBUM_PUBLIC)\n\n # test individual photo view page\n self.perm_escalate_helper(self.albumcontrol, self.indivphotorequest, self.testalbum, self.photo.id,\n AnonymousUser(), album.display_photo, ALBUM_PUBLIC)",
"def test_not_owner(self):\n creating_user = create_user()\n creating_user.save()\n festival = create_festival('test', creating_user)\n festival.save()\n\n concert = create_concert(festival, 'test')\n concert.save()\n\n login(self.client)\n\n client = create_client('test')\n client.delete_access = True\n client.save()\n\n response = self.client.post('/backend/u/conc/', {'client': 'test', 'id': concert.pk})\n self.assertEqual(response.status_code, 200)\n self.assertEqual('Permission not granted', response.content.decode('utf-8'))",
"def test_permissions(self):\n taxonomy = self.get_taxonomy()\n return True if self.request.user == taxonomy.author else taxonomy.public",
"def test_auth_private(self):\n self.do_visible(True, None, False, tenant='froggy')",
"def test_filter_owner_permission(self):\n User = get_user_model()\n user1 = User.objects.create(username=\"test_user1\", email=\"[email protected]\")\n obj = DescriptorSchema.objects.create(contributor=user1)\n obj.set_permission(Permission.VIEW, user1)\n\n data_template = {\n \"users\": {user1.id: \"view\"},\n \"groups\": {1: \"edit\", 2: \"NONE\"},\n }\n\n check_owner_permission(data_template, False, obj)\n\n # Check that only owner can set owner permission.\n data = deepcopy(data_template)\n data[\"users\"][1] = \"owner\"\n with self.assertRaises(exceptions.PermissionDenied):\n check_owner_permission(data, False, obj)\n check_owner_permission(data, True, obj)\n\n # Check that only owner can rewoke owner permission.\n obj.set_permission(Permission.OWNER, user1)\n data = deepcopy(data_template)\n data[\"users\"][1] = \"edit\"\n with self.assertRaises(exceptions.PermissionDenied):\n check_owner_permission(data, False, obj)\n check_owner_permission(data, True, obj)\n\n # Check that group can not be owner.\n obj.set_permission(Permission.VIEW, user1)\n data = deepcopy(data_template)\n data[\"groups\"][1] = \"owner\"\n with self.assertRaises(exceptions.ParseError):\n check_owner_permission(data, False, obj)\n with self.assertRaises(exceptions.ParseError):\n check_owner_permission(data, True, obj)",
"def test_aws_service_api_public_image_get(self):\n pass",
"def check_owner(data=None, **kw):\n if data and 'owner_id' in data and not data['owner_id'] == current_user.id:\n raise ProcessingException(description=\"No write privileges\",\n code=401)",
"def test_home_as_anonymous(self):\n response = self.client.get(\"/images/contents/\")\n self.assertEqual(response.status_code, 401)",
"def test_home_as_anonymous(self):\n response = self.client.get(\"/images/contents/\")\n self.assertEqual(response.status_code, 401)",
"def test_admin_public(self):\n with self.login(self.user_admin):\n self.assertTrue(current_user.is_authenticated)\n self.assertEqual(current_user, self.user_admin)\n\n rv = self.client.get('/')\n self.assertEqual(b'public', rv.data)",
"def test_no_images_unauthorized(self):\n res = self.client.get(IMAGE_URL)\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_filter_public_permissions(self):\n data = {\"public\": \"view\"}\n check_public_permissions(data)\n\n data = {\"public\": \"edit\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)\n\n data = {\"public\": \"share\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)\n\n data = {\"public\": \"owner\"}\n with self.assertRaises(exceptions.PermissionDenied):\n check_public_permissions(data)",
"def test_auth_public(self):\n self.do_visible(True, None, True, tenant='froggy')",
"def test_user_without_share(self):\n set_permission(Permission.EDIT, self.user1, self.collection)\n\n # Can not add permissions to users.\n data = {\"users\": {self.user2.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n # Can not add permissions to groups.\n data = {\"users\": {self.group.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)"
] | [
"0.70528495",
"0.6893526",
"0.67592186",
"0.6630321",
"0.65318274",
"0.65298545",
"0.6517861",
"0.65116936",
"0.6454932",
"0.6407351",
"0.63348746",
"0.63260263",
"0.63152415",
"0.6308235",
"0.62680256",
"0.62383586",
"0.6053724",
"0.59971654",
"0.5979315",
"0.59685737",
"0.59361196",
"0.5921458",
"0.5909884",
"0.590225",
"0.590225",
"0.5894046",
"0.58879244",
"0.58859384",
"0.5884606",
"0.58420056"
] | 0.6911272 | 1 |
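
This record and the previous one are one-line wrappers around the do_visible helper quoted among the negatives. As a sketch of an alternative layout (pytest plus the FakeImage/RequestContext doubles are assumptions; see the sketch above), the same cases read as a parameter table:

    import pytest

    VISIBILITY_CASES = [
        # (expected, img_owner, img_public, context kwargs)
        (True,  "pattieblack", True,  {"is_admin": True}),    # empty_public_owned
        (True,  "pattieblack", False, {"is_admin": True}),    # empty_private_owned
        (False, "pattieblack", False, {}),                    # anon_private_owned
        (False, "pattieblack", False, {"tenant": "froggy"}),  # auth_private_unowned
    ]

    @pytest.mark.parametrize("expected,owner,public,ctx_kwargs", VISIBILITY_CASES)
    def test_visibility(expected, owner, public, ctx_kwargs):
        img = FakeImage(owner, public)
        ctx = RequestContext(**ctx_kwargs)
        assert ctx.is_image_visible(img) == expected

This keeps each expected/owner/public triple visible at a glance instead of spreading it over many tiny methods.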
Tests that an authenticated context (with is_admin set to False) cannot share an image it does not own, even when the image has been shared with it through a membership whose can_share flag is False. | def test_auth_sharable_cannot_share(self):
self.do_sharable(False, 'pattieblack', FakeMembership(False),
tenant='froggy') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ensure_share(self, context, share, share_server=None):\n pass",
"def ensure_share(self, context, share, share_server=None):\r\n LOG.debug(\"Ensure share.\")",
"def cant_share_photo(request, ttl=None,*args, **kwargs):\n\tif ttl:\n\t\ttry:\n\t\t\tttl = int(ttl)\n\t\texcept ValueError:\n\t\t\tttl = None\n\tphoto_id = request.session.get(\"personal_group_shared_photo_id\",None)\n\torigin = request.session.get(\"personal_group_shared_photo_origin\",None)\n\tphoto_url = request.session.get(\"personal_group_shared_photo_url\",None)\n\tphoto_caption = request.session.get(\"personal_group_shared_photo_caption\",None)\n\tphoto_owner_username = request.session.get(\"personal_group_shared_photo_owner_username\",None)\n\treturn render(request,\"personal_group/sharing/photo_not_shared.html\",{'photo_caption':photo_caption,'photo_id':photo_id,'photo_url':photo_url,\\\n\t\t'photo_owner_username':photo_owner_username,'origin':origin,'ttl':ttl})",
"def canShare(self):\n return False",
"def test_auth_sharable_can_share(self):\n self.do_sharable(True, 'pattieblack', FakeMembership(True),\n tenant='froggy')",
"def test_user_without_share(self):\n set_permission(Permission.EDIT, self.user1, self.collection)\n\n # Can not add permissions to users.\n data = {\"users\": {self.user2.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n # Can not add permissions to groups.\n data = {\"users\": {self.group.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)",
"def deny_access(self, context, share, access, share_server=None):\n self._get_helper(share).deny_access('/', share, access)",
"def deny_access(self, context, share, access, share_server=None):\r\n LOG.debug(\"Deny access.\")\r\n self.helper._deny_access(share['name'], access, share['share_proto'])",
"def test_un_logged_in_user_can_not_upload_picture(self):\n tmp_file = generate_image_for_testing()\n response = self.client.post(self.user_passport_url,\n data={'passport': tmp_file})\n\n self.assertEqual(403, response.status_code)",
"def test_anon_shared(self):\n self.do_sharable(False, 'pattieblack', None)\n self.do_sharable(False, 'pattieblack', FakeMembership(True))",
"def allow_access(self, context, share, access, share_server=None):\r\n LOG.debug(\"Allow access.\")\r\n self.helper._allow_access(share['name'], access, share['share_proto'])",
"def test_empty_shared(self):\n self.do_sharable(False, 'pattieblack', None, is_admin=True)\n self.do_sharable(False, 'pattieblack', FakeMembership(True),\n is_admin=True)",
"def test_auth_sharable_owned(self):\n self.do_sharable(True, 'pattieblack', None, tenant='pattieblack')",
"def do_sharable(self, exp_res, img_owner, membership=None, **kwargs):\n\n img = FakeImage(img_owner, True)\n ctx = context.RequestContext(**kwargs)\n\n sharable_args = {}\n if membership is not None:\n sharable_args['membership'] = membership\n\n self.assertEqual(ctx.is_image_sharable(img, **sharable_args), exp_res)",
"def allow_access(self, context, share, access, share_server=None):\n self._get_helper(share).allow_access('/', share, access)",
"def test_not_logged_in(self):\n\n # test show album\n self.perm_escalate_helper(self.albumcontrol, self.showalbumrequest, self.testalbum, self.testalbum.id,\n AnonymousUser(), album.display_album, ALBUM_PUBLIC)\n\n # test photo view\n self.perm_escalate_helper_get_with_client(self.albumcontrol, self.testalbum, self.photo.id,\n \"photoid\", \"show_photo\", ALBUM_PUBLIC)\n\n # test individual photo view page\n self.perm_escalate_helper(self.albumcontrol, self.indivphotorequest, self.testalbum, self.photo.id,\n AnonymousUser(), album.display_photo, ALBUM_PUBLIC)",
"def media_image_remotely_accessible(self) -> bool:\n return True",
"def test_logged_in_not_friend(self):\n\n # log in\n self.make_logged_in_not_friend()\n\n # test show album\n self.perm_escalate_helper(self.albumcontrol, self.showalbumrequest, self.testalbum, self.testalbum.id,\n self.u2, album.display_album, ALBUM_PUBLIC)\n\n # test photo view\n self.perm_escalate_helper_get_with_client(self.albumcontrol, self.testalbum, self.photo.id,\n \"photoid\", \"show_photo\", ALBUM_PUBLIC)\n\n # test individual photo view page\n self.perm_escalate_helper(self.albumcontrol, self.indivphotorequest, self.testalbum, self.photo.id,\n self.u2, album.display_photo, ALBUM_PUBLIC)",
"def test_kyc_post_legal_share_holder(self):\n pass",
"def test_dashboards_v2_share(self):\n pass",
"def test_wrong_config_shares0(self):\n self.app.post_json(url=\"/config/shares\",\n params=dict(\n source='gsiftp://source',\n destination='gsiftp://nowhere',\n vo='dteam',\n share='dfdf'\n ),\n status=400\n )",
"def test_protect_owner(self):\n self.collection.set_permission(Permission.SHARE, self.user1)\n\n # User with share permission cannot grant ``owner`` permission\n data = {\"users\": {self.user2.pk: \"owner\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n self.assertNotIn(\"owner\", self.collection.get_permissions(self.user2))\n self.assertFalse(PermissionModel.objects.filter(user=self.user2).exists())\n\n # User with share permission cannot revoke ``owner`` permission\n self.collection.set_permission(Permission.OWNER, self.user2)\n data = {\"users\": {self.user2.pk: \"editor\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n self.assertEqual(\n self.collection.get_permissions(self.user2),\n [Permission.VIEW, Permission.EDIT, Permission.SHARE, Permission.OWNER],\n )\n self.collection.set_permission(Permission.NONE, self.user2)\n\n # Now let user1 be owner on collection.\n set_permission(Permission.OWNER, self.user1, self.collection)\n\n # ``owner`` permission cannot be assigned to a group\n data = {\"groups\": {self.group.pk: \"owner\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertFalse(PermissionModel.objects.filter(group=self.group).exists())\n\n # User with owner permission can grant ``owner`` permission\n data = {\"users\": {self.user2.pk: \"owner\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n self.assertEqual(\n self.collection.get_permissions(self.user2),\n [Permission.VIEW, Permission.EDIT, Permission.SHARE, Permission.OWNER],\n )\n\n # User with owner permission can revoke ``owner`` permission\n data = {\"users\": {self.user2.pk: \"edit\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n self.assertFalse(\n PermissionModel.objects.filter(\n user=self.user2, value=Permission.OWNER.value\n ).exists()\n )\n\n # User with owner permission cannot remove all owners\n data = {\"users\": {self.user1.pk: \"edit\", self.owner.pk: \"edit\"}}\n\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(resp.data[\"detail\"], \"Object must have at least one owner.\")\n\n owner_permissions = self.collection.permission_group.permissions.filter(\n value=Permission.OWNER.value\n )\n owner_count = owner_permissions.count()\n self.assertEqual(owner_count, 2)\n\n # User can delete his owner permission if there is at least one other owner\n self.assertTrue(owner_permissions.filter(user=self.user1).exists())\n data = {\"users\": {self.user1.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n self.assertFalse(owner_permissions.filter(user=self.user1.pk).exists())",
"def test_locked_asset_not_logged_in(self):\r\n self.client.logout()\r\n resp = self.client.get(self.url_locked)\r\n self.assertEqual(resp.status_code, 403) # pylint: disable=E1103\r",
"def test_no_images_unauthorized(self):\n res = self.client.get(IMAGE_URL)\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_unauthenticated_resource_allowed(self):\n raise NotImplementedError # FIXME",
"def test_auth_private_unowned(self):\n self.do_visible(False, 'pattieblack', False, tenant='froggy')",
"def share():\n return True",
"def test_share(self):\n\n # In the actual test, we'll want to confirm that an IP address\n # can be shared to a group\n\n # Try to share with the group--fails for now (operation not\n # implemented in nova); note: change 1 to group, '10.0.0.1' to IP\n dtutil.assert_raises(novaclient.OpenStackException,\n self.server.share_ip, 1, '10.0.0.1', True)",
"def test_01_self_unshare_resource(self):\n holes = self.holes\n cat = self.cat\n dog = self.dog\n cat.uaccess.share_resource_with_user(holes, dog, PrivilegeCodes.CHANGE)\n self.assertTrue(dog in holes.raccess.edit_users)\n self.assertTrue(dog in holes.raccess.view_users)\n self.assertTrue(\n is_equal_to_as_set(\n [dog],\n dog.uaccess.get_resource_unshare_users(holes)))\n dog.uaccess.unshare_resource_with_user(holes, dog)\n self.assertFalse(dog in holes.raccess.edit_users)\n self.assertFalse(dog in holes.raccess.view_users)\n self.assertTrue(\n is_equal_to_as_set(\n [], dog.uaccess.get_resource_unshare_users(holes)))",
"def test_auth_sharable(self):\n self.do_sharable(False, 'pattieblack', None, tenant='froggy')"
] | [
"0.6938848",
"0.6857029",
"0.68210304",
"0.67752564",
"0.6616296",
"0.63722545",
"0.6287799",
"0.6185038",
"0.61454177",
"0.6070145",
"0.6063103",
"0.6063056",
"0.59651726",
"0.5954177",
"0.5950448",
"0.58537835",
"0.5789933",
"0.57651097",
"0.5743968",
"0.57354796",
"0.5734228",
"0.57323635",
"0.5713446",
"0.5712914",
"0.57039493",
"0.5692112",
"0.5690987",
"0.56862795",
"0.56805223",
"0.5660907"
] | 0.74366206 | 0 |
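
Taken with its negatives (test_auth_sharable_owned, test_auth_sharable_can_share, test_anon_shared, test_empty_shared), this record fixes the sharing rule: a context needs a tenant, the owner may always share, non-owners share only through a membership whose can_share flag is set, and is_admin does not override. A minimal sketch of that rule (attribute names assumed from the do_sharable/FakeMembership doubles):

    def is_image_sharable(self, image, membership=None):
        if self.tenant is None:
            # anonymous/empty contexts cannot share, even with is_admin set
            # (see test_empty_shared among the negatives)
            return False
        if self.tenant == image.owner:
            return True                   # the owning tenant may always share
        if membership is not None:
            return membership.can_share   # members share only if explicitly allowed
        return False                      # no ownership, no membership -> no sharing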
loads file FILTER, returns filter matrix | def load_filter():
if not os.path.isfile(FILTER):
print('no filter found, creating square grid')
return []
with open(FILTER, 'r') as ff:
reader = csv.reader(ff)
l = list(reader)
ar = numpy.asarray(l)
# ar = numpy.transpose(ar, (0, 1))
# ar = numpy.flip(ar, 1)
# ar = numpy.rot90(ar, k=3, axes=(0, 1))
# ar = numpy.swapaxes(ar, 0, 1)
f = list(map(list, ar))
return f | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_filter_file(self, file_path): \n self._pop_all_self()\n self.filter_list = []\n self.file_path = file_path \n \n with codecs.open(self.file_path, 'r', encoding='cp1252') as fid: \n for k, line in enumerate(fid):\n line = line.lstrip('\\n\\r ')\n if line.startswith('#'):\n continue \n split_line = [item.strip() for item in line.split('\\t')]\n if k==0:\n # Header\n header = split_line\n else:\n line_dict = dict(zip(header, split_line))\n self[line_dict['variable']] = SingleFilter(line_dict, self.parameter)\n\n # Save attributes\n for item in self.keys():\n setattr(self, item, self[item])\n \n self.header = sorted(header)\n \n if self.filter_type == 'data':\n self.year_list = [y for y in range(self['YEAR_INTERVAL'].value[0], \n self['YEAR_INTERVAL'].value[1]+1)]",
"def _read_filter_data(filename):\n gains = []\n freqs = []\n freq_scale = 0\n with open(filename) as f:\n for line in f:\n words = line.split()\n if line.startswith('Freq'):\n _, scale = words[0].split(\"(\")\n scale = scale.rstrip(\")\")\n if scale==\"Hz\":\n freq_scale = 1\n elif scale==\"kHz\":\n freq_scale = 1e3\n elif scale==\"MHz\":\n freq_scale = 1e6\n elif scale==\"GHz\":\n freq_scale = 1e9\n else:\n raise ValueError(\"Cannot parse line: '\"+line+\"'\")\n elif len(words)==3 and words[0]!=\"Total\":\n f, g, p = line.split(\",\")\n freq = float(f) * freq_scale\n gain = float(g)\n phase = float(p)\n freqs.append(freq)\n gains.append(gain * np.exp(1j*phase))\n\n return np.array(gains), np.array(freqs)",
"def _load_filter(self, fname, interp=True, lamb=None):\n ftab = self.hdf\n if hasattr(fname, 'decode'):\n fnode = ftab.get_node('/filters/' + fname.decode('utf8'))\n else:\n fnode = ftab.get_node('/filters/' + fname)\n flamb = fnode[:]['WAVELENGTH']\n transmit = fnode[:]['THROUGHPUT']\n dtype = 'photon'\n unit = None\n\n attrs = fnode.attrs\n if 'DETECTOR' in attrs:\n dtype = attrs['DETECTOR']\n if 'WAVELENGTH_UNIT' in attrs:\n unit = attrs['WAVELENGTH_UNIT']\n\n fil = UnitFilter(flamb, transmit, name=fnode.name,\n dtype=dtype, unit=unit)\n\n if interp & (lamb is not None):\n fil = fil.reinterp(lamb)\n return fil",
"def read_filter(filter_file):\n\n fd = open(filter_file, \"r\")\n lines = fd.readlines()\n fd.close()\n\n wavelengths = []\n weights = []\n for line in lines:\n line = line.strip()\n words = line.split()\n wavelengths.append(float(words[0]))\n weights.append(float(words[1]))\n\n return (wavelengths, weights)",
"def parseFilter(filterList):\n filter_mat = None\n for line in filterList:\n try:\n line = np.array([float(x) for x in line.split()])\n if line.shape[0] != len(filterList):\n raise Exception(\"Filter must be square, pad with zeroes if you need a non-square filter\")\n\n if filter_mat is None:\n filter_mat = line\n else:\n filter_mat = np.vstack((filter_mat,line))\n except ValueError:\n logging.fatal(\"Invalid configuration: filter must contain only numbers\"); exit()\n except Exception as e:\n logging.fatal(e); exit()\n return filter_mat",
"def unpack(self, filter_file_type=\".dat\", verbose=False):\n\n if hasattr(self, \"phot\"):\n filter_names = np.unique(self.phot[\"filter\"])\n\n self.phot.add_index('filter', unique = True)\n\n\n for filter_name in filter_names:\n\n phot_table = self.phot.loc[\"filter\", filter_name]\n filter_filename = filter_name + filter_file_type\n if verbose: print(filter_filename)\n if verbose: print(phot_table)\n if verbose: print(type(filter_name), type(filter_file_type))\n\n # phot_table.meta = {\"filter_filename\": filter_filename}\n phot_table.meta[\"filter_filename\"] = filter_filename\n if not isinstance(phot_table, Row):\n # if len(np.unique(self.phot.loc[\"filter\", filter_name][\"MJD\"])) > 1:\n indices = phot_table.argsort(\"MJD\")\n # for column_name in phot_table.colnames:\n # phot_table[column_name] = phot_table[column_name][indices]\n sorted_phot_table = Table([phot_table[column_name][indices] for column_name in phot_table.colnames])\n else:\n sorted_phot_table = phot_table\n\n filter_key = np.unique(phot_table[\"filter\"])[0]\n\n if len(np.unique(phot_table[\"filter\"])) > 1 or filter_key != filter_name:\n raise errors.FilterMismatchError(\"There is a more than one filterdata in here! or there is a mismatch with filename\")\n path_to_filter = os.path.join(self.filter_directory, phot_table.meta['filter_filename'])\n\n # def load_filter(path, cmap = False, verbose = False):\n #\n if utils.check_file_path(os.path.abspath(path_to_filter)):\n filter_object = FilterClass()\n filter_object.read_filter_file(os.path.abspath(path_to_filter), verbose = verbose)\n filter_object.calculate_AB_zp()\n else:\n warnings.warn(\"Couldn't load the filter\")\n\n self.data_filters[filter_key] = filter_object\n\n self.data[filter_name] = sorted_phot_table\n\n self.filter_names = filter_names\n\n else:\n warnings.warn(\"Doesn't seem to be any data here (empty self.data)\")\n\n pass",
"def __init__(self, file_name):\n self.file_name = file_name\n\n self.A = 1\n self.B = 0\n self.C = 1\n self.R = FILTER_R\n self.Q = FILTER_Q\n\n self.data_stream = []",
"def load_filters(self):\n buffer_dict = dict(self.named_buffers())\n n = 0\n\n for k in self.phi_f.keys():\n if type(k) != str:\n self.phi_f[k] = buffer_dict['tensor' + str(n)]\n n += 1\n\n for psi_f in self.psi1_f:\n for sub_k in psi_f.keys():\n if type(sub_k) != str:\n psi_f[sub_k] = buffer_dict['tensor' + str(n)]\n n += 1\n\n for psi_f in self.psi2_f:\n for sub_k in psi_f.keys():\n if type(sub_k) != str:\n psi_f[sub_k] = buffer_dict['tensor' + str(n)]\n n += 1",
"def _load_filter(self, fname, **kwargs):\n with self as current_lib:\n return UnitLickIndex(fname, current_lib._content[fname])",
"def _load_filter(self, fname, **kwargs):\n with self as s:\n return LickIndex(fname, s._content[fname])",
"def _load_filter(self, fname, interp=True, lamb=None, *args, **kwargs):\n try:\n fil = UnitFilter.from_ascii(fname, *args, **kwargs)\n except Exception:\n content = self.content\n r = [k for k in content if fname in k]\n\n if len(r) <= 0: # try all lower for filenames (ascii convention)\n r = [k for k in content if fname.lower() in k]\n\n if len(r) > 1:\n print(\"auto correction found multiple choices\")\n print(r)\n raise ValueError('Refine name to one of {0}'.format(r))\n elif len(r) <= 0:\n raise ValueError('Cannot find filter {0}'.format(fname))\n else:\n fil = UnitFilter.from_ascii(r[0], *args, **kwargs)\n if (interp is True) and (lamb is not None):\n return fil.reinterp(lamb)\n else:\n return fil",
"def read_flt(input_file):\n\n if input_file.endswith('.flt') or input_file.endswith('.hdr'):\n input_file = input_file[:-4]\n else:\n print 'Incorrect filename'\n return 0,0 #exits module gracefully\n\n headers = read_headers(input_file)\n\n #read the data as a 1D array and reshape it to the dimensions in the header\n raster_array = read_bin(input_file).reshape(int(headers[1]), int(headers[0]))\n raster_array = raster_array.reshape(int(headers[1]), int(headers[0])) #rows, columns\n\n return raster_array, headers",
"def _read_filters(self, path):\n blob = utils.read_blob_file_contents(path)\n try:\n rules = json.loads(blob)\n except ValueError as e:\n msg = _(\n \"An error occurred when reading filters from file \"\n \"%(path)s: %(error)s\"\n ) % {\"path\": path, \"error\": e}\n raise exceptions.CommandError(msg)\n else:\n return rules",
"def getFileAsFiltFloatMatrix(dirPath, filt, columns, delim=\",\"):\n\tmat = list()\n\tfor rec in fileFiltSelFieldsRecGen(dirPath, filt, columns, delim):\n\t\tmat.append(asFloatList(rec))\n\treturn mat",
"def load_embedded(index, filename):\n weights = list()\n input_folder = os.path.join('input_files', 'embedded_matix')\n with open(os.path.join(input_folder, filename), 'r') as csvfile:\n filereader = csv.reader(csvfile, delimiter=',', quotechar='\"')\n for row in filereader:\n cat_ix = int(row[0])\n if index[cat_ix] == row[1].strip():\n weights.append([float(x) for x in row[2:]])\n csvfile.close()\n return np.array(weights)",
"def load_filter(filename):\n # parse config file\n if not os.path.isfile(filename):\n raise IOError('File \"%s\" does not exist' % filename)\n try:\n f = open(filename)\n except IOError:\n raise IOError('Could not open file \"%s\"' % filename)\n\n cfg_items = []\n for (i, line) in enumerate(f):\n try:\n # remove all comments and unnecessary whitespace\n normalizer = shlex.shlex(line)\n normalizer.wordchars += '.-'\n normal_line = ' '.join([t for t in normalizer])\n if normal_line:\n # split up normalized line and build dictionary\n cfg_item = {}\n for part in normal_line.split(','):\n cfg_split = shlex.split(part)\n key = cfg_split.pop(0)\n value = cfg_split\n cfg_item[key] = value\n cfg_items.append(cfg_item)\n except (IndexError, ValueError):\n raise RuntimeError( \\\n 'Could not parse line %i of file \"%s\"' % (i, filename))\n\n # look for global bit settings\n bits_global = None\n factor_bits_global = None\n norm_bits_global = None\n for cfg_item in cfg_items:\n if 'bits_global' in cfg_item:\n if bits_global is None:\n [bits_global] = cfg_item.pop('bits_global')\n bits_global = int(bits_global)\n else:\n raise RuntimeError( \\\n 'bits_global must not be specified more than once')\n if 'factor_bits_global' in cfg_item:\n if factor_bits_global is None:\n [factor_bits_global] = cfg_item.pop('factor_bits_global')\n factor_bits_global = int(factor_bits_global)\n else:\n raise RuntimeError( \\\n 'factor_bits_global must not be specified more than once')\n if 'norm_bits_global' in cfg_item:\n if norm_bits_global is None:\n [norm_bits_global] = cfg_item.pop('norm_bits_global')\n norm_bits_global = int(norm_bits_global)\n else:\n raise RuntimeError( \\\n 'norm_bits_global must not be specified more than once')\n\n # remove empty items from cfg_items, only node definitions should be left\n cfg_items = filter(None, cfg_items)\n\n # look for filter nodes\n filter_nodes = {}\n adjacency = {}\n input_node = None\n output_node = None\n for cfg_item in cfg_items:\n # mandatory settings\n try:\n [node] = cfg_item['node']\n except KeyError:\n raise RuntimeError('Node type not specified')\n try:\n [name] = cfg_item['name']\n except KeyError:\n raise RuntimeError('Name not specified')\n # optional settings\n if 'bits' in cfg_item:\n [bits] = map(int, cfg_item['bits'])\n else:\n bits = bits_global\n if 'connect' in cfg_item:\n connect = cfg_item['connect']\n else:\n connect = []\n if 'input' in cfg_item:\n if input_node is None:\n input_node = name\n else:\n raise RuntimeError('More than one input node specified')\n if 'output' in cfg_item:\n if output_node is None:\n output_node = name\n else:\n raise RuntimeError('More than one output node specified')\n\n # make filter node\n if name not in filter_nodes:\n if bits is not None:\n if node == 'Const':\n filter_nodes[name] = Const(bits)\n elif node == 'Add':\n filter_nodes[name] = Add(bits)\n elif node == 'Delay':\n filter_nodes[name] = Delay(bits)\n elif node == 'Multiply':\n if 'factor_bits' in cfg_item:\n [factor_bits] = cfg_item['factor_bits']\n factor_bits = int(factor_bits)\n else:\n factor_bits = factor_bits_global\n if 'norm_bits' in cfg_item:\n [norm_bits] = cfg_item['norm_bits']\n norm_bits = int(norm_bits)\n else:\n norm_bits = norm_bits_global\n if (factor_bits is not None and norm_bits is not None):\n filter_nodes[name] = Multiply(\n bits, factor_bits, norm_bits)\n if 'factor' in cfg_item:\n [factor] = cfg_item['factor']\n factor = float(factor)\n filter_nodes[name].set_factor(factor, norm=True)\n else:\n raise ValueError('Unknown node type: 
%s' % node)\n else:\n raise RuntimeError('Number of bits for node \"%s\" not specified' \\\n % name)\n adjacency[name] = connect\n else:\n raise RuntimeError('Node \"%s\" already present' % name)\n\n # make filter\n if input_node is None:\n raise RuntimeError('No input node specified')\n elif output_node is None:\n raise RuntimeError('No output node specified')\n else:\n return Filter(filter_nodes, adjacency, input_node, output_node)",
"def add_filters(fnames):\n with Database(writable=True) as base:\n for fname in fnames:\n with open(fname, 'r') as f_fname:\n filter_name = f_fname.readline().strip('# \\n\\t')\n filter_type = f_fname.readline().strip('# \\n\\t')\n filter_description = f_fname.readline().strip('# \\n\\t')\n filter_table = np.genfromtxt(fname)\n # The table is transposed to have table[0] containing the\n # wavelength and table[1] containing the transmission.\n filter_table = filter_table.transpose()\n # We convert the wavelength from Å to nm.\n filter_table[0] *= 0.1\n\n print(\"Importing {}... ({} points)\".format(filter_name,\n filter_table.shape[1]))\n\n new_filter = Filter(filter_name, filter_description, filter_type,\n filter_table)\n\n # We normalise the filter and compute the effective wavelength.\n # If the filter is a pseudo-filter used to compute line fluxes, it\n # should not be normalised.\n if not filter_name.startswith('PSEUDO'):\n new_filter.normalise()\n else:\n new_filter.effective_wavelength = np.mean(\n filter_table[0][filter_table[1] > 0]\n )\n\n base.add_filter(new_filter)",
"def loadFile(filterExt):\n basicFilter = \"*.\" + filterExt\n filePath = fileDialog2(fileFilter=basicFilter, dialogStyle=2, fm=1)\n if(filePath != None):\n #openfile = open('/Users/camtton/Desktop/drawing.svg', 'r')\n tokens = getSVGpath(filePath[0])\n return tokens\n else:\n print 'Please select a %s file'%(filterExt)",
"def loadData(name):\n inputs = []\n outputs = []\n with open(name) as file:\n data = file.readlines()[2:]\n lines = map(str.split, data)\n for line in lines:\n inputs.append(preparePatterns(line[:-1]))\n outputs.append(float(line[-1]))\n length = len(inputs[0])\n return inputs, outputs, length",
"def ascii_to_filter(filename, filter_name=None, detector=None, temperature=None, \n filter_type=None, wcol=0, tcol=None, **kwargs):\n strg = \"Reading a MiriFilter model from an ASCII file \"\n strg += \"is not longer supported.\"\n raise NotImplementedError(strg)",
"def load_data(self, f): \n self.sampling = True\n self.reads = np.load(f)\n self.total = self.reads.shape[0]",
"def _load_filter(self, *args, **kwargs):\n raise NotImplementedError",
"def load_filter_file(self, filter_path):\n logger.debug(\"Adding filter file {}\", filter_path)\n try:\n with open(filter_path, \"r\") as filter_file:\n try:\n json_filter_data = json.load(filter_file)\n except Exception as err:\n msg = \"Unable to parse filter file {} as a json file. {!r}\".format(\n filter_path, err)\n logger.debug(msg)\n raise errors.ParserError(msg)\n except IOError:\n raise errors.ParserError(\n \"Unable to access filter path '{}'\".format(filter_path))\n\n if \"version\" not in json_filter_data:\n raise errors.ParserError(\n \"Loading filter-file {} failed. Missing 'version' key.\".format(\n filter_path))\n\n if \"filters\" not in json_filter_data:\n raise errors.ParserError(\n \"Loading filter-file {} failed. Missing 'filters' key.\".format(\n filter_path))\n\n if not isinstance(json_filter_data[\"version\"], dict):\n raise errors.ParserError(\n \"Loading filter-file {} failed. \"\n \"Expecting value of 'version' entry to be a dictionary \"\n \"but instead its a {}.\".format(filter_path,\n type(json_filter_data[\"version\"])))\n\n version_info = json_filter_data[\"version\"]\n\n if \"major\" not in version_info:\n raise errors.ParserError(\n \"Loading filter-file {} failed. \"\n \"Missing 'major' key in 'version' value.\".format(filter_path))\n\n if \"minor\" not in version_info:\n raise errors.ParserError(\n \"Loading filter-file {} failed. \"\n \"Missing 'minor' key in 'version' value.\".format(filter_path))\n\n if not isinstance(version_info[\"major\"], int):\n raise errors.ParserError(\n \"Loading filter-file {} failed. \"\n \"Expecting int for major version found {} instead.\".format(\n filter_path, type(version_info[\"major\"])))\n\n if not isinstance(version_info[\"minor\"], int):\n raise errors.ParserError(\n \"Loading filter-file {} failed. \"\n \"Expecting int for minor version found {} instead.\".format(\n filter_path, type(version_info[\"minor\"])))\n\n if version_info[\"major\"] != FILTER_JSON_FORMAT_MAJOR_VERSION:\n raise errors.ParserError(\n \"Loading filter-file {} failed. \"\n \"Found unexpected major version in JSON filter file.\".format(\n filter_path))\n\n self._add_filters(json_filter_data[\"filters\"], filter_path)",
"def load_filter_evaluation(db_path):\n engine = create_engine('sqlite:///' + db_path)\n return pd.read_sql_table(TmFilterEval.__tablename__, engine)",
"def read_filter_file(self, path, fmt = \"ascii\",\n names = (\"wavelength\", \"throughput\"),\n wavelength_u = u.angstrom, verbose = False):\n if utils.check_file_path(os.path.abspath(path), verbose = verbose):\n self.data = Table.read(path, format = fmt, names = names)\n self.wavelength = self.data[\"wavelength\"] * wavelength_u\n if verbose: print(\"1\", np.nanmax(self.wavelength))\n self.wavelength = self.wavelength.to(u.angstrom)\n self.throughput = self.data[\"throughput\"]\n if verbose: print(\"2\", np.nanmax(self.wavelength))\n\n self.wavelength_u = self.wavelength.to(wavelength_u)\n self._filter_file_path = path\n if verbose: print(\"3\", np.nanmax(self.wavelength))\n\n filename = path.split('/')[-1]\n filename_no_extension = filename.split('.')[0]\n self.filter_name = filename_no_extension\n if verbose: print(\"4\", np.nanmax(self.wavelength))\n\n self.set_plot_colour(verbose = verbose)\n if verbose: print(\"5\", np.nanmax(self.wavelength))\n self.calculate_effective_wavelength()\n if verbose: print(\"6\", np.nanmax(self.wavelength))\n self.calculate_edges()\n if verbose: print(\"7\", np.nanmax(self.wavelength))\n self.get_zeropoint()\n if verbose: print(\"8\", np.nanmax(self.wavelength))\n\n else:\n warnings.warn(\"Foo\")",
"def load_vgg(file):\n vgg_layers = scipy.io.loadmat(file)['layers'][0]\n filters = {}\n for k in range(len(vgg_layers)):\n if vgg_layers[k][0][0][1][0] == 'conv':\n weights = np.array(vgg_layers[k][0][0][2][0][0])\n biases = np.reshape(vgg_layers[k][0][0][2][0][1], -1)\n filters['layer_{}'.format(k+1)] = [weights, biases]\n else:\n filters['layer_{}'.format(k+1)] = []\n return filters",
"def LoadSourceFilter(coverable_file_name):\n \n with open(coverable_file_name, \"r\") as cov_file:\n file_list = [line.strip() for line in cov_file.readlines()]\n return SourceFilter(file_list)",
"def fromfile(cls, f):\n raise NotImplementedError(\"ScalableRedisLocalBloomFilter not support fromfile\")",
"def get_filters(filepath):\n filters = {}\n with open(filepath, \"r\") as f:\n reader = csv.DictReader(f, delimiter=';')\n for row in reader:\n filter_id = row[\"Filter Column\"]\n filters.setdefault(filter_id, {})\n filters[filter_id][\"results\"] = row[\"Result\"].split(\", \")\n filters[filter_id][\"type\"] = row[\"Type\"]\n filters[filter_id][\"description\"] = ''.join(row[\"Description\"])\n return filters",
"def load_all_filters(self, interp=True, lamb=None):\n with self as s:\n filters = [s._load_filter(fname, interp=interp, lamb=lamb)\n for fname in s.content]\n return(filters)"
] | [
"0.66520554",
"0.657589",
"0.65067047",
"0.64896256",
"0.63868827",
"0.6371127",
"0.626495",
"0.5997112",
"0.59562635",
"0.5929189",
"0.59240067",
"0.5835717",
"0.58037555",
"0.5700353",
"0.56678426",
"0.5623952",
"0.5620478",
"0.55956954",
"0.5576885",
"0.55100983",
"0.550417",
"0.55021715",
"0.5485636",
"0.5467415",
"0.5460615",
"0.54592407",
"0.54575944",
"0.5457457",
"0.5453629",
"0.54457957"
] | 0.7438268 | 0 |
returns a boolean indicating whether xy is occupied in the filter matrix | def filtered(filter, xy):
try:
x, y = xy
return bool(filter[x][y])
except IndexError:
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def xy_occupied(xy, board):\n return True if board[xy[0]][xy[1]] else False",
"def occupiedNeighbor(self, xi, yi):\n\n xmax = self.mapData.og.info.width\n ymax = self.mapData.og.info.height\n\n if self.mapData.sampled:\n # Fails on an occupied cell\n assert self.mapData.mapArrayS[xi, yi] < 50\n for x in range(max(xi - 1, 0), min(xi + 1, xmax)):\n for y in range(max(yi - 1, 0), min(yi + 1, ymax)):\n if self.mapData.mapArrayS[x,y] > 50:\n return True\n return False\n else:\n # Fails on an occupied cell\n assert self.mapData.mapArray[xi, yi] < 50\n for x in range(max(xi - 1, 0), min(xi + 1, xmax)):\n for y in range(max(yi - 1, 0), min(yi + 1, ymax)):\n if self.mapData.mapArray[x,y] > 50:\n return True\n return False",
"def occupied(self, (xIndex, yIndex)):\n return xIndex < 0 or yIndex < 0 or \\\n xIndex >= self.xN or yIndex >= self.yN or \\\n self.grid[xIndex][yIndex]",
"def contains(self, xy):\n if np.ndim(xy) == 2:\n xp = xy[:, 0]\n yp = xy[:, 1]\n elif (np.ndim(xy) == 1) and (len(xy) == 2):\n xp = xy[0]\n yp = xy[1]\n else:\n raise ValueError(\"crazy\")\n\n xinside = (self.x0 <= xp) & (xp <= self.x1)\n yinside = (self.y0 <= yp) & (yp <= self.y1)\n return xinside & yinside",
"def contains(self, x):\n # need more to assure its a real SSP - ie on right torus\n return (len(x) == self._shape[0])",
"def full(self):\n for x in range(0,3):\n for y in range(0,3):\n if self[x,y] is None:\n return False\n return True",
"def __contains__(self, x):\n indexes = self.get_indexes(x)\n return self.sketch[indexes] > 0",
"def inside(i,j,im,h=H): #X\n return i-h >=0 and j-h >=0 and i+h+1<=im.shape[0] and j+h+1<=im.shape[1]",
"def check(self,a,x,y):\r\n return not self.exitsinrow(self.rows,x,a) and not self.existsincol(self.rows,y,a) and \\\r\n not self.exitsinblock(self.rows, x - x % 3, y - y % 3,a)",
"def fullGrid(state):\n return not ((state[:, :, 0] + state[:, :, 1]) == 0).any()",
"def point_in_map(self, x, y):\r\n return 0 <= x < self.width and 0 <= y < self.height and (x,y) not in self.walls",
"def __contains__(self, point):\n for component, dim in zip(point, self.dimensions):\n if component not in dim:\n return False\n return True",
"def filter(x,y):\n if tf.reduce_sum(y) > pixels:\n return True\n else:\n return False",
"def filter_tile_neighbors(self, coord):\n coord = coord.int_tuple\n if coord[1] <= self.MAX_Y and coord[0] <= self.MAX_X and coord[1] >= \\\n 0 and coord[0] >=\\\n 0 and (self.currentmap.boxAt(coord[0], coord[1])\n == 0 or self.currentmap.boxAt(coord[0], coord[1]) == 2):\n return True\n return False",
"def is_at_intersection(self):\n directions = 0\n self.tile = (self.get_nearest_row(), self.get_nearest_col())\n if self.internal_map[self.tile[0] - 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0] + 1][self.tile[1]] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] - 1] not in ('x', ):\n directions += 1\n if self.internal_map[self.tile[0]][self.tile[1] + 1] not in ('x', ):\n directions += 1\n return True if directions > 2 else False",
"def __check_row(self, x: int, y: int) -> bool:\n return not any([self.__maze[x, y + i] for i in (-1, 0, 1)])",
"def contains(self, coord):\n # print(coord, self.position, self.size)\n return (0 <= coord[0] - self.position[0] < self.size[0] and\n 0 <= coord[1] - self.position[1] < self.size[1])",
"def __check_col(self, x: int, y: int) -> bool:\n return not any([self.__maze[x + i, y] for i in (-1, 0, 1)])",
"def test(self, grid, flag):\n x = self.x+SPEED_X[flag]\n y = self.y+SPEED_Y[flag]\n return 0 <= x < self.n and 0 <= y < self.n and grid[y][x] == 1",
"def contains(self, point):\n return 0 <= point.x <= 1 \\\n and 0 <= point.y <= 1 \\\n and 0 <= point.z <= 1",
"def _is_occupied(\n grid: List[List[str]], row: int, col: int, dx: int, dy: int) -> bool:\n while 0 <= (row + dy) < len(grid) and 0 <= (col + dx) < len(grid[0]):\n row += dy\n col += dx\n if grid[row][col] == 'L':\n return False\n if grid[row][col] == '#':\n return True\n return False",
"def is_visible(self, x, y) :\n\t\tres_x = (x > self.x_min) and (x < self.x_max)\n\t\t# print 'res_x : {0}, x : {1}, x_min : {2}, x_max:{3}'.format(res_x, x, self.x_min, self.x_max)\n\t\tres_y = (y > self.y_min) #and (y < self.y_max)\n\t\treturn res_x and res_y",
"def __cell_is_occupied(self, x, y) -> bool:\n return self.occupancy_map.data[self.__get_cell_index(x, y)] != 0",
"def solved(self):\n return all(cell == 1 for row in self.faces for cell in row) or all(cell == 0 for row in self.faces for cell in row)",
"def is_occupied(self, p):\r\n return 0 <= p[0] < self.width and 0 <= p[1] < self.height and self.grid[p[1]][p[0]] == '#'",
"def __cell_is_in_map(self, x, y) -> bool:\n return x >= 0 and y >= 0 and x < self.occupancy_map.info.width and y < self.occupancy_map.info.height",
"def is_in_field(self, x, y):\n return (self.origin_x <= x < self.width) and (self.origin_y <= y < self.height)",
"def any(self):\n boolean = True\n if type(self.idxs) == np.ndarray:\n boolean = all(self.idxs.shape)\n elif type(self.idxs) == list:\n sh = np.array(self.idxs).shape\n if len(sh) >= 2:\n boolean = np.all(sh)\n return boolean",
"def _check_occupied(self, col, row):\n if self.board[row - 1][col - 1] == EMPTY:\n return False\n else:\n return True",
"def visited(self, row, col):\n return (row, col) in self._visited"
] | [
"0.6924757",
"0.6860181",
"0.6504782",
"0.6499604",
"0.6476733",
"0.6406629",
"0.6393374",
"0.63787687",
"0.6288766",
"0.6284607",
"0.6230571",
"0.62087417",
"0.6196269",
"0.61808234",
"0.6141441",
"0.612595",
"0.61152285",
"0.61060596",
"0.60959953",
"0.60850585",
"0.60841113",
"0.60813856",
"0.6079903",
"0.6060185",
"0.60549986",
"0.6052309",
"0.6033238",
"0.60327077",
"0.60222626",
"0.60032696"
] | 0.71220756 | 0 |
Write the matrix to a CSV file | def write_out(matrix, filename):
with open(filename, 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
for r in matrix:
writer.writerow(r)
print(filename + ' written!') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_csv(table, header):\n with open(\"%s.csv\" % header, \"w\") as csvfile:\n for i in range(len(table)):\n for j in range(len(table[i])):\n if j != len(table[i])-1:\n tmp = table[i][j] + \",\"\n else:\n tmp = table[i][j] + \"\\n\"\n csvfile.write(tmp)",
"def WriteToCsv(matrix, csvFileName, csvDelimiter=','):\r\n \r\n if os.path.isfile(csvFileName) == True:\r\n os.remove(csvFileName) # Deletes the CSV file\r\n\r\n filePermission = \"w\" # Platform-specific file reading privileges\r\n #if platform.system() == \"Windows\":\r\n # filePermission = \"wb\"\r\n \r\n with open(csvFileName, filePermission) as csvfile:\r\n writer = csv.writer(csvfile, delimiter=csvDelimiter, quotechar='|', quoting=csv.QUOTE_MINIMAL)\r\n for row in matrix:\r\n if row != []:\r\n writer.writerow(row)\r\n csvfile.close()",
"def createFileCSV(table, path=\"./prediction\"):\t\n\tif len(table) < 1:\n\t\traise NameError('Empty Table!')\n\telse:\n\t\tfile = open(path + '.csv', 'w+')\n\n\t\tfile.write(table[0].toStringHeaders() + \"\\n\")\n\n\t\tfor row in table:\n\t\t\tfile.write(row.toStringCSV() + '\\n')\n\t\tfile.close()",
"def csv_output(self):\r\n fh = open(\"output.csv\",'w')\r\n for i in range(len(self.population.columns)):\r\n if i != len(self.population.columns)-1:\r\n fh.write(str(self.population.columns[i]))\r\n fh.write(\",\")\r\n else:\r\n fh.write(str(self.population.columns[i]))\r\n fh.write(\"\\n\")\r\n\r\n for i in range(len(self.population.data)):\r\n for j in range(len(self.population.data[i])):\r\n if j != len(self.population.data[i])-1:\r\n fh.write(str(self.population.data[i][j]))\r\n fh.write(\",\")\r\n else:\r\n fh.write(str(self.population.data[i][j]))\r\n fh.write(\"\\n\")\r\n fh.close()",
"def write_csv(self):\n self.tableView.df.to_csv('Data export.csv', index=False)\n print('CSV file exported')",
"def export_csv(self, path):\r\n\r\n with open(path, 'w') as f:\r\n f.write('# h,hr,m')\r\n\r\n if self.rho is not None:\r\n f.write(',rho')\r\n if self.temperature is not None:\r\n f.write(',temperature')\r\n\r\n f.write('\\n')\r\n for i in range(self.shape[0]):\r\n for j in range(self.shape[1]):\r\n f.write(f'{self.h[i, j]},{self.hr[i, j]},{self.m[i, j]}')\r\n if self.rho is not None:\r\n f.write(f',{self.rho[i, j]}')\r\n if self.temperature is not None:\r\n f.write(f',{self.temperature[i, j]}')\r\n f.write('\\n')\r\n return",
"def write_table_to_csv(table: List[List], filename: str):\n with open(filename, 'w') as csvfile:\n writer = csv.writer(csvfile, delimiter='\\t')\n for row in table:\n writer.writerow(row)",
"def to_csv(header, rows):\r\n with open('result.csv', 'w') as result:\r\n result_writer = csv.writer(result, delimiter=';')\r\n result_writer.writerow(header)\r\n result_writer.writerows(rows)",
"def write(self): \n # Open csv file\n with open(self.file_name, 'w', newline='') as file:\n self._writer = csv.writer(file)\n \n # Write header rows\n# self.write_sim_header_data(self.trace.sim.get_data())\n \n # Write trace table\n self._writer.writerow(['Record #', 'Rep', 'Time',\n 'Priority', 'Record Type', 'Name'])\n for trace_record in self.trace._record_list:\n self._writer.writerow(trace_record.get_row())\n file.close()",
"def mat_to_csv(\n self,\n input_matrix,\n output_csv,\n fields=None,\n n_tab=1,\n debug=False,\n i='origin',\n j='destination'\n ):\n script_text = r\"\"\"\n RUN PGM=MATRIX PRNFILE=\"format_env\\mat_to_csv.prn\" MSG='mat_to_csv'\n\n FILEI MATI[1] = filei_mati\n FILEO PRINTO[1] = fileo_printo\n\n print_headers\n JLOOP\n print_in_jloop\n ENDJLOOP\n\n ENDRUN\n \"\"\"\n if fields is None:\n tabs = ['tab_%i' % (i + 1) for i in range(n_tab)]\n fields = tabs\n else:\n n_tab = len(fields)\n field_names = ', '.join(fields)\n\n filei_mati = '\"%s\"' % input_matrix\n fileo_printo = '\"%s\"' % output_csv\n\n print_headers = 'IF (I = 1) \\n PRINT LIST =\"' + '\" ,\";\" ,\"'.join([i, j] + fields) + '\" PRINTO = 1 \\n ENDIF'\n print_assignation = ' '.join(['%s = MI.1.%s \\n' % (fields[i].replace(' ', '_'), i + 1) for i in range(n_tab)])\n print_statement = 'PRINT LIST = I, \";\", J, \";\", ' + ',\";\",'.join([f.replace(' ', '_') for f in fields]) + ' PRINTO = 1'\n print_in_jloop = print_assignation + ' \\n' + print_statement\n\n # creating a cube script\n script = open(self.environment + r'\\mat_to_csv.s', 'w', encoding='latin')\n script.write(script_text.replace(\n 'format_env', self.environment).replace(\n 'filei_mati', filei_mati).replace(\n 'fileo_printo', fileo_printo).replace(\n 'field_names', field_names).replace(\n 'print_in_jloop', print_in_jloop).replace('print_headers', print_headers))\n script.close()\n\n # runs the script with voyager.exe\n options = \"\"\"/Start /CloseWhenDone /Minimize /NoSplash\"\"\" if not debug else \"\"\n os.system('voyager.exe \"' + self.environment + r'\\mat_to_csv.s\" ' + options)",
"def generate_csv_table(table_values):\n\n with open('ayasdi_assignment.csv', 'wb') as csvfile:\n filewriter = csv.writer(csvfile, delimiter=',')\n filewriter.writerows(table_values)",
"def writeMatrix(self):\n\t\tpass",
"def _csvWriter(self):\r\n # Initialize Header\r\n table = []\r\n voltageRow = []\r\n for i in range(len(self._voltages)):\r\n voltageRow.append(self._voltages[i][0])\r\n voltageRow.append(\" \")\r\n if self._vna.isTwoComponents():\r\n voltageRow.append(\" \")\r\n table.append(voltageRow)\r\n \r\n # Fill table with data\r\n # if self._vna.isTwoComponents():\r\n # for i in range(len(self._frequency[0])):\r\n # row = []\r\n # for j in range(len(self._frequency)):\r\n # row.append(self._frequency[j][i])\r\n # row.append(self._intensity[j][2*i])\r\n # row.append(self._intensity[j][2*i + 1])\r\n # table.append(row)\r\n # else: \r\n for i in range(len(self._frequency[0])):\r\n row = []\r\n for j in range(len(self._frequency)):\r\n row.append(self._frequency[j][i])\r\n row.append(self._intensity[j][i])\r\n table.append(row)\r\n\r\n # Write to CSV\r\n filename = 'CSVs/' + self._vna.getDateFormatted() + '.csv'\r\n with open(filename, 'w', newline='') as csvfile:\r\n dataWriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)\r\n for i in range(len(table)):\r\n dataWriter.writerow(table[i])",
"def write_csv(self, filelike):\r\n items = self.rows()\r\n writer = unicodecsv.writer(filelike, encoding=\"utf-8\")\r\n writer.writerow(self.header())\r\n for item in items:\r\n writer.writerow(item)",
"def saveCSV(self):\n filename=tkFileDialog.asksaveasfilename(defaultextension='.csv',\n initialdir=os.getcwd(),\n filetypes=[(\"csv\",\"*.csv\"),(\"All files\",\"*.*\")])\n if not filename:\n return\n for m in self.matrices:\n matrix = self.matrices[m] \n if matrix != None: \n c=matrix.csvRepresentation()\n f=open(filename,'w')\n f.write(c)\n f.close()\n return",
"def write_table_to_file(table):\n with open(\"story.csv\", \"w\") as file:\n for record in table:\n row = ';'.join(record)\n file.write(row + \"\\n\")",
"def write_to_csv(self, data):\n with open(\"out.csv\", \"w\", newline=\"\") as f:\n writer = csv.writer(f)\n writer.writerow(self.column_names)\n writer.writerows(data)\n print(\" Updated succesfully \")",
"def WriteMatrix(matrix, outfile=sys.stdout, separator=\"\\t\", format=\"%f\",\n row_headers=None, col_headers=None):\n if col_headers:\n outfile.write(separator + separator.join(col_headers) + \"\\n\")\n\n for x in range(0, matrix.shape[0]):\n if row_headers:\n outfile.write(row_headers[x] + separator)\n outfile.write(\n string.join(map(lambda x: format % x, matrix[x, ]), separator) + \"\\n\")",
"def writeTable(table, filename):\n with open(filename, \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(table)",
"def export_table(path, path_out):\n table = rb.get_table(path)\n table.to_csv(path_out, index=False)\n return",
"def save_csv(self, filename): # DONE\n self.data.to_csv(filename)",
"def to_csv(self, csvwriter):\n csvwriter.writerow(self.to_csv_row())",
"def exportToCsv(self, filepath):\n table = list()\n table.append(list(self.__header))\n for a in self.__assays:\n table.append(\n [unicode(a.timestamp.isoformat()),\n unicode(a.dab_cell_count),\n unicode(a.hem_cell_count),\n unicode(a.dab_dabhemfraction),\n unicode(a.img_path)])\n # File encoding will be same as it expected by Excel on machine where\n # this file was created.\n encoding = locale.getpreferredencoding()\n with open(filepath, mode='wb') as f:\n writer = UnicodeWriter(f, encoding=encoding, delimiter=';')\n writer.writerows(table)",
"def write_torque_table(A, filename):\n f = open(filename, 'w')\n for row in range(np.size(A, axis=0)):\n A[row,:].tofile(f, sep=',')\n f.write('\\n')\n f.close()",
"def write_csv(self, outfile, collapse_orders=False, show_age=False):\r\n # Write header row\r\n outfile.write(self.get_csv_header(collapse_orders, show_age).encode())\r\n\r\n # Write content\r\n for x in self.records:\r\n x.write_csv(outfile, collapse_orders, show_age)",
"def export_feature_matrix_csv(feature_matrix,path, delimiter = ','):\n with open(path, encoding='utf-8', mode='w') as f:\n header = ['symbol'] + feature_matrix.features\n writer = DictWriter(f, header,delimiter=delimiter)\n writer.writerow({h: h for h in header})\n for seg in feature_matrix.segments:\n #If FeatureMatrix uses dictionaries\n #outdict = feature_matrix[seg]\n #outdict['symbol'] = seg\n #writer.writerow(outdict)\n if seg in ['#','']: #wtf\n continue\n featline = feature_matrix.seg_to_feat_line(seg)\n outdict = {header[i]: featline[i] for i in range(len(header))}\n writer.writerow(outdict)",
"def write(self):\n \n self.df.to_csv('/home/austin/Desktop/Falcon/realestate/Falcon/Datasets/mls.csv')",
"def matrix_export_save(simulation, demandsegment, dir):\n matrix = demandsegment.matrix\n matrix_couples = Matrix.objects.filter(matrices=matrix)\n # To avoid conflict if two users export a file at the same time, we\n # generate a random name for the export file.\n filename = dir + '/matrix(' + demandsegment.usertype.name + ')(' + str(demandsegment.usertype.user_id) + ').tsv'\n\n with codecs.open(filename, 'w', encoding='utf8') as f:\n writer = csv.writer(f, delimiter='\\t')\n # Get a dictionary with all the values to export.\n values = matrix_couples.values_list('p__user_id', 'q__user_id', 'r')\n # Write a custom header.\n writer.writerow(['origin', 'destination', 'population'])\n writer.writerows(values)\n\n return filename",
"def at_write_prob_mat_to_csv(na_list, prob_mat, out_path):\n create_folder(os.path.dirname(out_path))\n f = gzip.open(out_path, 'w')\n for n in xrange(len(na_list)):\n na = na_list[n]\n f.write(na)\n for p in prob_mat[n]:\n f.write('\\t' + \"%.3f\" % p)\n f.write('\\r\\n')\n f.close()",
"def writeCSV():\n final_list = get_final_list()\n path_to_csv_File = 'system_metrics.csv'\n\n csv_file = open(path_to_csv_File, 'w+', newline='', encoding=\"utf8\")\n csv_file_writer = csv.writer(csv_file, delimiter=',')\n\n csv_file_writer.writerow(['Subscription', 'Resource', 'MetricType',\n 'Timestamp', 'Unit', 'Minimum', 'Maximum', 'Average'])\n\n for item in final_list:\n csv_file_writer.writerow([item['subscription'], item['resource'], item['metricType'], item['timestamp'],\n item['unit'], item['minimum'], item['maximum'], item['average']])\n\n print('Output written successfully!!')"
] | [
"0.75623345",
"0.7330622",
"0.71900606",
"0.71231455",
"0.71107894",
"0.7031687",
"0.7024952",
"0.70113516",
"0.697658",
"0.69399834",
"0.6934993",
"0.69087684",
"0.69058",
"0.6873836",
"0.6852782",
"0.6852024",
"0.6777883",
"0.676096",
"0.6755106",
"0.67167455",
"0.6708375",
"0.6671856",
"0.66679996",
"0.6666607",
"0.6664351",
"0.66559476",
"0.6622331",
"0.6607635",
"0.6579855",
"0.6562081"
] | 0.76784086 | 0 |
Construct a DCEL from the output of matplotlib.delaunay.delaunay. | def from_delaunay_triangulation(cls, xl, yl, triangles, circumcentres):
def add_containing_face_to_dcel():
containing_face_edges = [edge for edge in dcel.edges if not edge.nxt]
edge = containing_face_edges.pop()
face = Face(outer_component=None, inner_components=[edge])
dcel.faces.append(face)
first_edge = edge
previous_edge = [
e for e in containing_face_edges if e.get_destination() == edge.origin
]
edge.prev = previous_edge[0]
while len(containing_face_edges) > 1:
edge.incident_face = face
next_edge = [
e for e in containing_face_edges if e.origin == edge.get_destination()
]
edge.nxt = next_edge[0]
next_edge[0].prev = edge
edge = next_edge[0]
containing_face_edges.remove(next_edge[0])
edge_2 = containing_face_edges.pop()
edge.incident_face = face
edge_2.incident_face = face
edge_2.prev = edge
edge_2.nxt = first_edge
edge.nxt = edge_2
def add_triangle_edges(circumcentre):
triangles_edges = []
for vertex_idx, origin in enumerate(triangle_vertices):
# Destination of the edge in this triangle that has vertex as origin
destination = triangle_vertices[(vertex_idx + 1) % 3]
edge_1 = HalfEdge(origin)
edge_2 = HalfEdge(destination, twin=edge_1)
edge_1.twin = edge_2
edge_1 = dcel.add_edge(edge_1)
edge_2.twin = edge_1
edge_2 = dcel.add_edge(edge_2)
edge_1.twin = edge_2
triangles_edges.append(edge_1)
triangle_face = Face(triangles_edges[0], circumcentre=list(circumcentre))
dcel.faces.append(triangle_face)
# Set previous and next of the edges
for edge_idx, edge in enumerate(triangles_edges):
edge.nxt = triangles_edges[(edge_idx + 1) % 3]
edge.prev = triangles_edges[(edge_idx + 3 - 1) % 3]
edge.incident_face = triangle_face
triangle_vertices[edge_idx].incident_edge = edge
dcel = cls()
for t_idx, t in enumerate(triangles):
triangle_vertices = [
dcel.add_vertex(Vertex(x))
for x in du.get_triangle_vertices(xl, yl, t)
]
add_triangle_edges(circumcentres[t_idx])
add_containing_face_to_dcel()
return dcel | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _mesh(self):\n from scipy.spatial import Delaunay\n points = self.cluster.get_positions()\n delaunay = Delaunay(points)\n simplices = self._filter_max_dist_in_element(delaunay.simplices)\n delaunay.simplices = simplices\n return delaunay",
"def __plot_delaunay(self, ax=None) -> None:\n for simplex in self.hull.simplices:\n ax.plot(self.points[simplex, 0], self.points[simplex, 1], \"r-\")\n\n tri = Delaunay(self.points)\n ax.triplot(self.points[:, 0], self.points[:, 1], tri.simplices.copy(), lw=1)",
"def _get_diff_dc(self):\n self._diff_dc = tuple(encode_differential(self.data[:, 0, 0]))",
"def _DeRedden(lam,flux,ra,dec,dustmap_path='/Users/vzm83/Softwares/sfddata-master'): \n m = sfdmap.SFDMap(dustmap_path) \n flux_unred = pyasl.unred(lam,flux,m.ebv(ra,dec))\n return flux_unred",
"def delaunay_lattice_from_pts(xy, trimbound=True, target_z=-1, max_bond_length=-1, thres=4.0, zmethod='random',\n minimum_bonds=-1, check=False):\n NP = len(xy)\n tri = Delaunay(xy)\n TRI = tri.vertices\n\n # check\n # plt.triplot(xy[:,0], xy[:,1], TRI, 'go-')\n # plt.show()\n\n BL = TRI2BL(TRI)\n NL, KL = BL2NLandKL(BL, NP=NP, NN='min')\n\n if trimbound:\n # Cut unnatural edge bonds (ones that are long and skinny)\n NL, KL, BL, TRI = delaunay_cut_unnatural_boundary(xy, NL, KL, BL, TRI, thres)\n\n # check\n if check:\n plt.clf()\n plt.triplot(xy[:, 0], xy[:, 1], TRI, 'go-')\n plt.show()\n\n # Cut bonds longer than max allowed length\n if max_bond_length > 0:\n print 'Cutting bonds longer than max_bond_length...'\n BL = cut_bonds(BL, xy, max_bond_length)\n if check:\n display_lattice_2D(xy, BL, title='In delaunay_lattice_from_pts(), removed long bonds.')\n NL, KL = BL2NLandKL(BL, NN='min')\n\n if minimum_bonds > 0:\n # Remove any points with no bonds\n print 'Removing points without any bonds...'\n if minimum_bonds == 1:\n keep = KL.any(axis=1)\n else:\n keep = np.sum(KL, axis=1) > minimum_bonds\n # keep = np.array([np.count_nonzero(KL[i]) > minimum_bonds for i in range(len(KL))])\n xy, NL, KL, BL, PVxydict = remove_pts(keep, xy, BL, NN='min')\n if check:\n display_lattice_2D(xy, BL, NL=NL, KL=KL, title='In delaunay_lattice_from_pts(), removed pts without bonds.')\n\n # Cut bonds to tune average coordination\n if target_z > 0:\n print 'Cutting bonds to tune average coordination...'\n if zmethod == 'random':\n NL, KL, BL = cut_bonds_z_random(xy, NL, KL, BL, target_z)\n elif zmethod == 'highest':\n NL, KL, BL = cut_bonds_z_highest(xy, NL, KL, BL, target_z)\n\n print 'Constructing BM...'\n BM = NL2BM(xy, NL, KL)\n\n if check:\n display_lattice_2D(xy, BL, NL=NL, KL=KL, title='Checking output lattice in delaunay_lattice_from_pts()')\n # vc = cc[:,tri.neighbors]\n # # kill edges at infinity, plotting those would need more work...\n # vc[:,tri.neighbors == -1] = np.nan\n #\n # lines = []\n # lines.extend(zip(cc.T, vc[:,:,0].T))\n # lines.extend(zip(cc.T, vc[:,:,1].T))\n # lines.extend(zip(cc.T, vc[:,:,2].T))\n return xy, NL, KL, BL, BM",
"def Deboucle3D(*args):\n return _BRepAlgo.BRepAlgo_Tool_Deboucle3D(*args)",
"def ProteinDelaunay(pdbid, chain):\n Data = []\n Head = ['PDBID', 'Quad', 'SortedQuad', 'RedAlpha', 'SortRedAlpha', 'V1', 'V2', 'V3', 'V4', 'L1', 'L2', 'L3', 'L4',\n 'L5', 'L6', 'SumL', 'AvgL', 'DevL', 'DevTetra', 'Vol', 'TF1', 'TF2', 'TF3', 'TF4', 'SumTF', 'AvgTF', 'hullArea', 'hullVolume']\n Data.append(Head)\n \n pointcloud, bf, resname = PointCloudData(pdbid, chainid)\n print \"Given PDB ID: \", pdbid\n print \"Given Chain ID:\", chain\n print \"Number of C-alpha points: \", len(pointcloud)\n\n # Convex Hull.\n ConvxHull = ConvexHull(pointcloud)\n hullArea = round(ConvxHull.area, 4)\n hullVolume = round(ConvxHull.volume, 4)\n\n # Delaunay Tessellation\n delaunay_hull = Delaunay(pointcloud, furthest_site=False, incremental=False, qhull_options='Qc') # noqa E501\n delaunay_points = delaunay_hull.points\n delaunay_vertices = delaunay_hull.vertices\n delaunay_simplices = delaunay_hull.simplices\n delaunay_neighbors = delaunay_hull.neighbors\n print \"Number of Delaunay Simplices: \", len(delaunay_simplices)\n\n for i in delaunay_vertices:\n\n # Obtain the indices of the vertices.\n one, two, three, four = i[2], i[1], i[3], i[0]\n\n # Obtain the coordinates based on the indices.\n cordA = pointcloud[one]\n cordB = pointcloud[two]\n cordC = pointcloud[three]\n cordD = pointcloud[four]\n\n # Get three letter amino acid names based on indices.\n a = resname[one]\n b = resname[two]\n c = resname[three]\n d = resname[four]\n\n # Get the temprature factors for the amino acids.\n a_tf = bf[one]\n b_tf = bf[two]\n c_tf = bf[three]\n d_tf = bf[four]\n\n # Get the string of three letter amino acids\n # forming the vertices of the tetrahedra.\n amino = [a, b, c, d]\n sortAmino = sorted(amino)\n amino = '-'.join(amino)\n sortAmino = '-'.join(sortAmino)\n\n # Get one letter code of the amino acids\n oneA = amino_dict.replace_all(a, amino_dict.one_letter)\n oneB = amino_dict.replace_all(b, amino_dict.one_letter)\n oneC = amino_dict.replace_all(c, amino_dict.one_letter)\n oneD = amino_dict.replace_all(d, amino_dict.one_letter)\n oneLet = [oneA, oneB, oneC, oneD]\n sortOneLet = sorted(oneLet)\n oneLet = ''.join(oneLet)\n sortOneLet = ''.join(sortOneLet)\n\n # Get Reduced Amino Acid Representations.\n flpA = amino_dict.replace_all(oneA, amino_dict.FLP)\n flpB = amino_dict.replace_all(oneB, amino_dict.FLP)\n flpC = amino_dict.replace_all(oneC, amino_dict.FLP)\n flpD = amino_dict.replace_all(oneD, amino_dict.FLP)\n flp = [flpA, flpB, flpC, flpD]\n sortflp = sorted(flp)\n flp = (''.join(flp)).upper()\n sortflp = (''.join(sortflp)).upper()\n\n # Calculate distances between the tetrahedra vertices.\n AB = np.linalg.norm(cordA - cordB)\n AC = np.linalg.norm(cordA - cordC)\n AD = np.linalg.norm(cordA - cordD)\n BC = np.linalg.norm(cordB - cordC)\n BD = np.linalg.norm(cordB - cordD)\n CD = np.linalg.norm(cordC - cordD)\n\n # Calculate the tetrahedra Volume.\n A_prime = cordA - cordD\n B_prime = cordB - cordD\n C_prime = cordC - cordD\n primes = [A_prime, B_prime, C_prime]\n primes = np.asarray(primes)\n det = np.linalg.det(primes)\n Vol = round((abs(det) / 6), 4)\n\n # Sum of Edge Lengths.\n SumL = (AB + AC + AD + BC + BD + CD)\n SumL = round(SumL, 4)\n\n # Average Edge Lengths.\n AvgL = round((SumL / 6), 4)\n\n # Deviation in Edge Lengths.\n devLp = (AB - AvgL) ** 2\n devLq = (AC - AvgL) ** 2\n devLr = (AD - AvgL) ** 2\n devLs = (BC - AvgL) ** 2\n devLt = (BD - AvgL) ** 2\n devLu = (CD - AvgL) ** 2\n devLy = [devLp, devLq, devLr, devLs, devLt, devLu]\n sumDevL = sum(devLy)\n DevL = round(math.sqrt(sumDevL / 6.0), 
4)\n\n # Deviation in Tetrahedrality\n lenArr = [AB, AC, AD, BC, BD, CD]\n DevT = DevTetra(lenArr)\n\n # Sum and Average Temperature Factors.\n SumTF = round((a_tf + b_tf + c_tf + d_tf), 4)\n AvgTF = round(SumTF / 4, 4)\n\n # Data List\n line = [pdbid, oneLet, sortOneLet, flp, sortflp, one, two, three, four, AB, AC, AD, BC, BD, CD, SumL, AvgL, DevL, DevT, Vol, a_tf, b_tf, c_tf, d_tf, SumTF, AvgTF, hullArea, hullVolume]\n Data.append(line)\n\n ## Get coordinates based on the vertices.\n ## vertices_coords store the x, y, z coordinates for the delaunay_vertices.\n vertices_coords = pointcloud[delaunay_vertices]\n ## delaunay_indices store the indices for the delaunay_points.\n delaunay_indices = np.arange(len(delaunay_points))\n\n ## Get ready for mayavi plot.\n fig = mlab.figure(1, bgcolor=(0, 0, 0))\n fig.scene.disable_render = True\n ## Get a 3d scatter plot for the delaunay_points.\n mlab.points3d(delaunay_points[:,0], delaunay_points[:,1], delaunay_points[:,2], scale_factor=0.40, color=(0.99, 0.00, 0.00))\n ion_c_alpha_scatter = mlab.pipeline.scalar_scatter(delaunay_points[:,0], delaunay_points[:,1], delaunay_points[:,2], delaunay_indices)\n ion_c_alpha_delaunay = mlab.pipeline.delaunay3d(ion_c_alpha_scatter)\n ion_c_alpha_edges = mlab.pipeline.extract_edges(ion_c_alpha_delaunay)\n mlab.pipeline.surface(ion_c_alpha_edges, colormap='winter', opacity=0.4)\n mlab.savefig(pdbid + '_MayaviViz.x3d')\n mlab.show()\n return Data",
"def get_dem(myhuc, sources):\n logging.info(\"\")\n logging.info(\"Preprocessing DEM\")\n logging.info(\"==========================\")\n logging.info(\"downloading DEM\")\n\n # load shapefiles for the HUC of interest\n logging.info(\"loading HUC %s\"%myhuc)\n profile, huc = sources['HUC'].load_huc(myhuc)\n assert(profile['crs']['init'] == 'epsg:4269') # latlong\n\n dem_profile, dem = workflow.clip.clip_dem(huc, sources['DEM'])\n dem = dem[0,:,:] # only the first band\n return dem_profile, dem",
"def get_dnde(spectrum,energies):\n energies=units.tonumpy(energies,units.MeV)\n dnde=SpectrumPlotter.get_dnde_mev(spectrum,energies)\n return units.tosympy(dnde,units.ph/units.cm**2/units.s/units.MeV)",
"def detrend(xyz_csv, in_dem, aoi_shp):\n\n print('Detrending DEM...')\n detrended_dem = detrend_that_raster(xyz_csv=xyz_csv, in_dem=in_dem, aoi_shp=aoi_shp)\n print('Done')\n print('Detrended DEM @ %s' % detrended_dem)",
"def build_delaunay(coords, trim_dist='percentile_size', perc=99, return_dist=False):\n\n # pairs of indices of neighbors\n pairs = Voronoi(coords).ridge_points\n\n if trim_dist is not False:\n dist = distance_neighbors(coords, pairs)\n if not isinstance(trim_dist, (int, float)):\n trim_dist = find_trim_dist(dist=dist, method=trim_dist, nb_nodes=coords.shape[0], perc=perc)\n pairs = pairs[dist < trim_dist, :]\n return pairs",
"def dem(\n bounds, bounds_crs, dst_crs, out_file, resolution, interpolation, verbose, quiet\n):\n verbosity = verbose - quiet\n configure_logging(verbosity)\n if not dst_crs:\n dst_crs = \"EPSG:3005\"\n bcdata.get_dem(\n bounds,\n out_file=out_file,\n src_crs=bounds_crs,\n dst_crs=dst_crs,\n resolution=resolution,\n interpolation=interpolation,\n )",
"def Decoupler(data,decoupled_name,list_to_decouple=None,decimals=False):\n list_dec = parameters.outputs if list_to_decouple is None else copy.copy(list_to_decouple)\n # Get the arrays of mH, mA ordered as in the outputs\n list_rest = [i for i in data.columns if i not in list_dec] # All but outputs\n n_weights = len(list_dec)\n mHmA = np.empty((0,2))\n for ol in list_dec:\n if decimals:\n arr = np.array([[float(re.findall(r\"\\d*\\.\\d+|\\d+\", ol)[0]),float(re.findall(r\"\\d*\\.\\d+|\\d+\", ol)[1])]])\n else:\n arr = np.array([[int(re.findall(r'_\\d+', ol)[0].replace('_','')),int(re.findall(r'_\\d+', ol)[1].replace('_',''))]])\n mHmA = np.append(mHmA,arr,axis=0)\n # Get the numpy arrays #\n decouple = data[list_dec].values\n repeat = data[list_rest].values\n\n # Repeat and decouple #\n repeat = Repeater(repeat,n_weights)\n masses = np.tile(mHmA,(data.shape[0],1))\n decouple = decouple.flatten()\n\n # Concatenate and make DF #\n new_arr = np.c_[repeat,masses,decouple]\n df = pd.DataFrame(new_arr,columns=list_rest+['mH_MEM','mA_MEM',decoupled_name])\n\n return df",
"def __init__(self,num_pores=None,domain_size=None,**kwargs):\n super(Delaunay,self).__init__(**kwargs)\n if (num_pores and domain_size) is None:\n num_pores = 1\n domain_size = [1.0,1.0,1.0]\n else:\n self.generate(num_pores,domain_size)",
"def native(self) -> \"Grid2DIterate\":\r\n return Grid2DIterate(\r\n values=self,\r\n mask=self.mask,\r\n fractional_accuracy=self.fractional_accuracy,\r\n sub_steps=self.sub_steps,\r\n store_native=True,\r\n )",
"def make_dhdu(ham, controls, derivative_fn):\n\n dHdu = []\n for ctrl in controls:\n dHdu.append(derivative_fn(ham, ctrl['symbol']))\n\n return dHdu",
"def build_dcel(self):\r\n\r\n # Step 1: vertex list creation\r\n for v in self.vl:\r\n self.vertices.append(Vertex(v[0], v[1]))\r\n\r\n # Step 2: hedge list creation. Assignment of twins and\r\n # vertices\r\n\r\n for e in self.el:\r\n if e[0] >= 0 and e[1] >= 0:\r\n h1 = Hedge(self.vertices[e[0]],\r\n self.vertices[e[1]])\r\n h2 = Hedge(self.vertices[e[1]], self.vertices[e[0]])\r\n h1.twin = h2\r\n h2.twin = h1\r\n self.vertices[e[1]].hedgelist.append(h1)\r\n self.vertices[e[0]].hedgelist.append(h2)\r\n self.hedges.append(h2)\r\n self.hedges.append(h1)\r\n else:\r\n print(\"oh shit boi wadup\")\r\n\r\n # Step 3: Identification of next and prev hedges\r\n for index, v in enumerate(self.vertices):\r\n v.sort_incident()\r\n l = len(v.hedgelist)\r\n if l < 2:\r\n raise DcelError(\"Badly formed dcel: less than two hedges in vertex:\" + str(index))\r\n else:\r\n for i in range(l - 1):\r\n v.hedgelist[i].nexthedge = v.hedgelist[i + 1].twin\r\n v.hedgelist[i + 1].prevhedge = v.hedgelist[i]\r\n v.hedgelist[l - 1].nexthedge = v.hedgelist[0].twin\r\n v.hedgelist[0].prevhedge = v.hedgelist[l - 1]\r\n\r\n # Step 4: Face assignment\r\n provlist = self.hedges[:]\r\n nf = 0\r\n nh = len(self.hedges)\r\n\r\n while nh > 0:\r\n h = provlist.pop()\r\n nh -= 1\r\n # We check if the hedge already points to a face\r\n if h.face == None:\r\n f = Face()\r\n nf += 1\r\n # We link the hedge to the new face\r\n f.wedge = h\r\n f.wedge.face = f\r\n # And we traverse the boundary of the new face\r\n while not h.nexthedge is f.wedge:\r\n h = h.nexthedge\r\n h.face = f\r\n self.faces.append(f)\r\n # And finally we have to determine the external face\r\n for f in self.faces:\r\n f.external = f.area() < 0",
"def __repr__(self):\n return (\n '<DCEL ('\n 'vertices:\\n {obj.vertices},\\n'\n 'edges:\\n {obj.edges},\\n'\n 'faces:\\n {obj.faces}>'.format(obj=self)\n )",
"def dem_generation(lastoolsdir, lidardir, ground_poly, cores, units_code, keep_orig_pts, coarse_step,\n coarse_bulge, coarse_spike, coarse_down_spike,\n coarse_offset, fine_step, fine_bulge, fine_spike,\n fine_down_spike, fine_offset, aoi_shp,\n dem_resolution, dem_method, tri_meth, void_meth):\n\n # We carry input spatial ref over from the above process, but we should still convert from shp to ref object\n print('Processing LiDAR to remove vegetation points...')\n las_folder = lidardir + '\\\\las_files\\\\'\n process_lidar(lastoolsdir + '\\\\', las_folder, ground_poly, cores, units_code, keep_orig_pts,\n coarse_step,\n coarse_bulge, coarse_spike, coarse_down_spike,\n coarse_offset, fine_step, fine_bulge, fine_spike,\n fine_down_spike, fine_offset)\n print('Done')\n\n print('Generating a %sm resolution DEM...' % dem_resolution)\n dem = lidar_to_raster(lidardir, ground_poly, aoi_shp, dem_method, tri_meth, void_meth,\n m_cell_size=float(dem_resolution))\n print('Done')\n\n print('Generating hillshade raster for the DEM...')\n hill_out = lidardir + '\\\\hillshade.tif'\n arcpy.HillShade_3d(dem, hill_out)\n print('Done')",
"def create_from_hdu(cls, hdu, ebins):\n hpx = HPX.create_from_hdu(hdu, ebins)\n colnames = hdu.columns.names\n cnames = []\n if hpx.conv.convname == 'FGST_SRCMAP_SPARSE':\n pixs = hdu.data.field('PIX')\n chans = hdu.data.field('CHANNEL')\n keys = chans * hpx.npix + pixs\n vals = hdu.data.field('VALUE')\n nebin = len(ebins)\n data = np.zeros((nebin, hpx.npix))\n data.flat[keys] = vals\n else:\n for c in colnames:\n if c.find(hpx.conv.colstring) == 0:\n cnames.append(c)\n nebin = len(cnames)\n data = np.ndarray((nebin, hpx.npix))\n for i, cname in enumerate(cnames):\n data[i, 0:] = hdu.data.field(cname)\n\n return cls(data, hpx)",
"def from_dict(data, decomposer_=None, rx=None, ax=None):\n if decomposer_ is not None:\n decomposer = decomposer_\n else:\n decomposer = SOAPDecomposer(**data[\"decomposer\"])\n \n result = SOAPVector(data[\"P\"], decomposer)\n result.dcP = data[\"dcP\"]\n result.dnP = data[\"dnP\"]\n if rx is not None and data[\"rx\"] is None:# pragma: no cover\n result.rx = rx\n else:\n result.rx = data[\"rx\"]\n if ax is not None and data[\"ax\"] is None:# pragma: no cover\n result.ax = ax\n else:\n result.ax = data[\"ax\"]\n \n if data[\"cRDF\"] is not None:\n result.cRDF = DF(data[\"dcP\"], True, result.rx, decomposer,\n calculate=False)\n result.cRDF.df = data[\"cRDF\"]\n if data[\"nRDF\"] is not None:\n result.nRDF = DF(data[\"dnP\"], False, result.rx, decomposer,\n calculate=False)\n result.nRDF.df = data[\"nRDF\"]\n if data[\"cADF\"] is not None:\n result.cADF = DF(data[\"dcP\"], True, result.ax, decomposer,\n calculate=False)\n result.cADF.df = data[\"cADF\"]\n if data[\"nADF\"] is not None:\n result.nADF = DF(data[\"dnP\"], False, result.ax, decomposer,\n calculate=False)\n result.nADF.df = data[\"nADF\"]\n\n return result",
"def get_dekosky(self) -> pd.DataFrame:\n return pd.read_feather(self.figure_data_paths.dekosky_vh12_path)",
"def loadData(fname='Unstra.out2.00008.athdf'):\n #data=ath.athdf(fname,quantities=['B1','B2','B3'])\n time,data=ath.athdf(fname,quantities=['Bcc1'])\n bx = data['Bcc1']\n time,data=ath.athdf(fname,quantities=['Bcc2'])\n by = data['Bcc2']\n time,data=ath.athdf(fname,quantities=['Bcc3'])\n bz = data['Bcc3']\n x = data['x1f']\n y = data['x2f']\n z = data['x3f']\n # refinement\n rfac = 1.0\n ##if bx.shape[0] < 512:\n ## nz,ny,nx = bx.shape\n ## rfac = int(512/bx.shape[0])\n ## bx = np.repeat(bx,rfac,axis=0)\n ## bx = np.repeat(bx,rfac,axis=1)\n ## bx = np.repeat(bx,rfac,axis=2)\n ## by = np.repeat(by,rfac,axis=0)\n ## by = np.repeat(by,rfac,axis=1)\n ## by = np.repeat(by,rfac,axis=2)\n ## bz = np.repeat(bz,rfac,axis=0)\n ## bz = np.repeat(bz,rfac,axis=1)\n ## bz = np.repeat(bz,rfac,axis=2)\n # ---\n def curl(vx,vy,vz,dx,dy,dz):\n [dzvx,dyvx,dxvx] = np.gradient(vx)\n [dzvy,dyvy,dxvy] = np.gradient(vy)\n [dzvz,dyvz,dxvz] = np.gradient(vz)\n cx = dyvz/dy-dzvy/dz\n cy = dzvx/dz-dxvz/dx\n cz = dxvy/dx-dyvx/dy\n # No need to del the reference by one manually\n # allow python to perform its own garbage collection\n # after the function return cxyz\n #del dzvx\n #del dzvy\n #del dzvz\n return cx,cy,cz\n # ---\n dx = dz = (x[1]-x[0])/rfac\n dy = (y[1]-y[0])/rfac\n jx,jy,jz = curl(bx,by,bz,dx,dy,dz)\n j2 = jx**2+jy**2+jz**2\n return j2",
"def from_pyradex(self, integrated_flux, mol_data, line_width=1.0 * u.km / u.s,\n escapeProbGeom='lvg', iter=100,\n collider_density={'H2': 900*2.2}):\n\n try:\n import pyradex\n except ImportError:\n raise ImportError('Pyradex not installed. Please see \\\n https://github.com/keflavich/pyradex/blob/master/INSTALL.rst')\n\n if not isinstance(mol_data, Phys):\n raise ValueError('mol_data must be a `sbpy.data.phys` instance.')\n\n register('Production Rates', {'Radex': '2007A&A...468..627V'})\n\n # convert mol_tag JPLSpec identifier to verbose name if needed\n try:\n mol_data['lamda_name']\n name = mol_data['lamda_name'][0]\n name = name.lower()\n except KeyError:\n if not isinstance(mol_data['mol_tag'][0], str):\n cat = JPLSpec.get_species_table()\n mol = cat[cat['TAG'] == mol_data['mol_tag'][0]]\n name = mol['NAME'].data[0]\n name = name.lower()\n else:\n name = mol_data['mol_tag'][0]\n name = name.lower()\n\n # try various common instances of molecule names and check them against LAMDA before complaining\n try:\n Lamda.molecule_dict[name]\n except KeyError:\n try_name = \"{}@xpol\".format(name)\n try:\n Lamda.molecule_dict[try_name]\n name = try_name\n except KeyError:\n print('Molecule name {} not found in LAMDA, module tried {} and also\\\n found no molecule with this identifier within LAMDA. Please\\\n enter LAMDA identifiable name using mol_data[\"lamda_name\"]\\\n . Use Lamda.molecule_dict to see all available options.'.format(name, try_name))\n raise\n\n # define Temperature\n temp = mol_data['temp']\n\n # check for optional values within mol_data\n if 'temp_back' in mol_data:\n tbackground = mol_data['temp_back']\n else:\n tbackground = 2.730 * u.K\n\n # define cdensity and iteration parameters\n cdensity = mol_data['cdensity'].to(1 / (u.cm * u.cm))\n cdensity_low = cdensity - (cdensity*0.9)\n cdensity_high = cdensity + (cdensity*9)\n # range for 400 iterations\n cdensity_range = np.linspace(cdensity_low, cdensity_high, iter)\n fluxes = []\n column_density = []\n\n with tempfile.TemporaryDirectory() as datapath:\n for i in cdensity_range:\n R = pyradex.Radex(column=i, deltav=line_width,\n tbackground=tbackground, species=name,\n temperature=temp, datapath=datapath,\n escapeProbGeom=escapeProbGeom,\n collider_densities=collider_density)\n\n table = R()\n\n # find closest matching frequency to user defined\n indx = (np.abs(table['frequency']-mol_data['t_freq'])).argmin()\n radexfreq = table['frequency'][indx]\n # get table for that frequency\n values = table[table['frequency'] == radexfreq]\n # use eq in io.f from Pyradex to get integrated flux in K * km/s\n int_flux_pyradex = 1.0645 * values['T_B'] * line_width\n\n fluxes.append(int_flux_pyradex)\n column_density.append(i)\n\n # closest matching integrated flux from pyradex\n\n fluxes = np.array(fluxes)\n\n index_flux = (\n np.abs(fluxes-integrated_flux.to(u.K * u.km / u.s).value)).argmin()\n\n # corresponding column density in 1/cm^2\n column_density = column_density[index_flux]\n print('Closest Integrated Flux:{}'.format(\n fluxes[index_flux] * u.K * u.km / u.s))\n print('Given Integrated Flux: {}'.format(integrated_flux))\n\n return column_density",
"def BRepAlgo_Tool_Deboucle3D(*args):\n return _BRepAlgo.BRepAlgo_Tool_Deboucle3D(*args)",
"def __init__(self,points,prescribedValues,PDEMatrix=np.eye(2),functionRHS=lambda x: 0):\n \n self.functionRHS= functionRHS\n\n #referenceElement holds the points of the reference element from which all other elements\n #are calculated\n self.referenceElement = np.array([[0,0],[1.,0],[0,1.]])\n\n #Calculate a delaunay triangulation of the input points\n self.triangulation = Delaunay(points)\n\n #Uses to initiate the stiffness matrix and the Rhs with the correct size\n self.numberDOF = np.size(self.triangulation.points[:,0])\n\n #is the biggest side of the triangulation\n self.maxDiam = 0\n\n self.prescribedValues = [] \n if self.checkPrescribedValues(prescribedValues):\n self.prescribedValues = prescribedValues\n else:\n print(\"Error: Prescribed Value index not an integer\")\n #the 3 linear Basis funtctions on the reference triangle\n #each has the value 1 at one points and 0 at the other points\n #Numbering of the vertices according to self.referenceElement\n self.linearBasis = []\n self.linearBasis.append(lambda x : 1-x[0]-x[1])\n self.linearBasis.append(lambda x : x[0])\n self.linearBasis.append(lambda x : x[1])\n\n #gradients of the basis functions on a reference triangle\n self.gradBasis = []\n self.gradBasis.append(np.array([-1.,-1])) \n self.gradBasis.append(np.array([1.,0]))\n self.gradBasis.append(np.array([0,1.]))\n\n #Holds integral of two basisfunctons over one reference triangle\n self.elementaryBasisMatrix = 1.0/12*np.array([[1.,0.5,0.5],[0.5,1.,0.5],[0.5,0.5,1.]])\n\n #initiate Righthandside with zeros\n self.rightHandSide = np.zeros(self.numberDOF)\n \n #strong form of PDE is: div(A dot grad(u)) = f, where A is PDEMatrix\n self.PDEMatrix= PDEMatrix",
"def __call__(self, jd):\n e = self.ephemeris.earth(jd)\n tpos_au, tvel_au_per_d = self._position_and_velocity(jd)\n t = Barycentric(e.position.au + tpos_au,\n e.velocity.au_per_d + tvel_au_per_d,\n jd)\n t.geocentric = False # test, then get rid of this attribute\n t.rGCRS = tpos_au\n t.vGCRS = tvel_au_per_d\n t.topos = self\n t.ephemeris = self.ephemeris\n t.altaz_rotation = self._altaz_rotation(jd)\n return t",
"def show_derivative(self):\n for trace in self.plotWidget.plotDataItems:\n dt = float(trace.attrs['dt'])\n dtrace = np.diff(trace.data)\n x = pgplot.make_xvector(dtrace, dt)\n self.plotWidget.plot(x, dtrace, pen=pg.mkPen('r'))",
"def calculate_derivative(current_data):\n\n # Declare array with first value equals zero to build gradient\n derivative_data = [0]\n derivative_data = np.gradient(current_data[\"4. close\"])\n # Add gradient values as column to current dataframe\n current_data[\"gradient\"] = derivative_data\n\n return current_data",
"def _lindblad_driven(H, rho0, c_ops=None, e_ops=None, Nt=1, dt=0.005, t0=0.,\n return_result=True):\n\n def calculateH(t):\n\n Ht = H[0]\n\n for i in range(1, len(H)):\n Ht += - H[i][1](t) * H[i][0]\n\n return Ht\n\n nstates = H[0].shape[-1]\n\n if c_ops is None:\n c_ops = []\n if e_ops is None:\n e_ops = []\n\n\n # initialize the density matrix\n rho = rho0.copy()\n rho = rho.astype(complex)\n\n\n\n t = t0\n\n if return_result == False:\n\n f_dm = open('den_mat.dat', 'w')\n fmt_dm = '{} ' * (nstates**2 + 1) + '\\n'\n\n f_obs = open('obs.dat', 'w')\n fmt = '{} '* (len(e_ops) + 1) + '\\n'\n\n for k in range(Nt):\n\n t += dt\n\n Ht = calculateH(t)\n\n rho = rk4(rho, liouvillian, dt, Ht, c_ops)\n\n # dipole-dipole auto-corrlation function\n #cor = np.trace(np.matmul(d, rho))\n\n # take a partial trace to obtain the rho_el\n # compute observables\n observables = np.zeros(len(e_ops), dtype=complex)\n\n for i, obs_op in enumerate(e_ops):\n observables[i] = obs_dm(rho, obs_op)\n\n f_obs.write(fmt.format(t, *observables))\n\n\n f_obs.close()\n f_dm.close()\n\n return rho\n\n else:\n\n rholist = [] # store density matries\n\n result = Result(dt=dt, Nt=Nt, rho0=rho0)\n\n observables = np.zeros((Nt, len(e_ops)), dtype=complex)\n\n for k in range(Nt):\n\n t += dt\n\n Ht = calculateH(t)\n\n rho = rk4(rho, liouvillian, dt, Ht, c_ops)\n\n rholist.append(rho.copy())\n\n observables[k, :] = [obs_dm(rho, op) for op in e_ops]\n\n\n result.observables = observables\n result.rholist = rholist\n\n return result"
] | [
"0.553893",
"0.5346823",
"0.52831537",
"0.50846463",
"0.49604708",
"0.48745608",
"0.48555076",
"0.485055",
"0.4848358",
"0.4832407",
"0.47853115",
"0.47777563",
"0.47689658",
"0.47675616",
"0.47572455",
"0.474961",
"0.4736425",
"0.47201127",
"0.4714435",
"0.47010985",
"0.4685433",
"0.46528113",
"0.46408987",
"0.4638567",
"0.46346027",
"0.4631739",
"0.4628875",
"0.46147752",
"0.46054053",
"0.4568641"
] | 0.5686502 | 0 |
Add an edge to the DCEL if it doesn't already exist, otherwise return the existing edge. | def add_edge(self, edge):
try:
edge_idx = self.edges.index(edge)
return self.edges[edge_idx]
except ValueError:  # list.index raises ValueError when the edge is absent
self.edges.append(edge)
return edge | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_edge(self, edge):\n\n add_egde = True\n for edge_this in self.edges:\n if edge_this == edge:\n add_egde = False\n\n if add_egde:\n self.edges.append(edge)\n\n return self",
"def add_edge(self, edge):\n edge = set(edge)\n (vertex1, vertex2) = tuple(edge)\n if vertex1 in self.graph_dict:\n self.graph_dict[vertex1].append(vertex2)\n else:\n self.graph_dict[vertex1] = [vertex2]\n return edge",
"def add_edge(self, edge):\n if(self.has_edge(edge) == 0):\n self.__graph_dict[edge[0]].append(edge[1])",
"def add_edge(self, edge: e.Edge) -> None:\n if edge not in self.edges:\n self.edges.append(edge)\n self.num_edges = self.num_edges + 1",
"def add_edge(self, edge):\n assert edge not in self.edges\n self.edges.append(edge)",
"def add_edge(self,_check_existing=True,**kwargs):\n j=None\n if '_index' in kwargs:\n j=kwargs.pop('_index')\n if j==len(self.edges):\n # this is the index we'd get anyway.\n j=None\n else:\n assert len(self.edges)>j\n assert self.edges[j]['deleted']\n\n if _check_existing:\n j_exists=self.nodes_to_edge(*kwargs['nodes'])\n if j_exists is not None:\n raise GridException(\"Edge already exists\")\n \n if j is None:\n e=np.zeros( (),dtype=self.edge_dtype)\n self.edges=array_append(self.edges,e)\n j=len(self.edges)-1\n\n # default values\n self.edges[j]['cells'][:]=-1\n self.edges[j]['deleted']=False\n\n for k,v in six.iteritems(kwargs):\n self.edges[k][j]=v\n\n # most basic checks on edge validity:\n if self.edges[j]['nodes'][0]==self.edges[j]['nodes'][1]:\n raise self.InvalidEdge('duplicate nodes')\n\n if self._node_to_edges is not None:\n n1,n2=self.edges['nodes'][j]\n self._node_to_edges[n1].append(j)\n self._node_to_edges[n2].append(j)\n\n self.push_op(self.unadd_edge,j)\n return j",
"def add_edge(self, edge):\n\n if edge.uuid is None:\n edge.uuid = self._generate_uuid()\n\n if edge.uuid in self._edges:\n error_str = \"Trying to add an already existing edge with uuid: \"\\\n + str(edge.uuid)\n raise KeyError(error_str)\n\n self._edges[edge.uuid] = Edge.from_edge(edge)\n\n return edge.uuid",
"def add_edge(self, edge, directed=False, auto=False):\n (v1, v2) = edge.get_id()\n if v1 in self.vertices.keys() and v2 in self.vertices.keys():\n if directed:\n if auto:\n self.edges[edge.get_id()] = edge\n else:\n if v1 != v2:\n self.edges[edge.get_id()] = edge\n else:\n if self.edges.get((v2, v1)) is None:\n if auto:\n self.edges[edge.get_id()] = edge\n else:\n if v1 != v2:\n self.edges[edge.get_id()] = edge",
"def add_edge(self, edge):\n edge = set(edge)\n (vertex1, vertex2) = tuple(edge)\n if vertex1 in self.__graph_dict:\n self.__graph_dict[vertex1].append(vertex2)\n else:\n self.__graph_dict[vertex1] = [vertex2]",
"def add_edge(self, edge):\n edge = set(edge)\n (vertex1, vertex2) = tuple(edge)\n if vertex1 in self.graph_dict:\n self.graph_dict[vertex1].append(vertex2)\n else:\n self.graph_dict[vertex1] = [vertex2]",
"def addEdge(self, edge):\n Digraph.addEdge(self, edge)\n rev = Edge(edge.getDestination(), edge.getSource())\n Digraph.addEdge(self, rev)",
"def add_edge(self, edge):\n self[edge[0]][edge[1]] = edge\n self[edge[1]][edge[0]] = edge",
"def _add_edge(self, a, b):\n e = Edge2(a, b)\n i = bisect(self.edges, e)\n \n # if edge between these vertices exists just return it\n if len(self.edges) > i and self.edges[i] == e:\n return self.edges[i]\n \n # otherwise add new edge in sorted position and return it\n self.edges.insert(i, e)\n return e",
"def add_edge(self, edge):\r\n edge = set(edge)\r\n (vertex1, vertex2) = tuple(edge)\r\n \r\n if vertex1 in self.__graph_dict.keys() and vertex2 in self.__graph_dict.keys():\r\n if vertex2 in self.__graph_dict[vertex1] and vertex1 in self.__graph_dict[vertex2]:\r\n return\r\n self.__graph_dict[vertex1].add(vertex2)\r\n self.__graph_dict[vertex2].add(vertex1)\r\n elif vertex1 not in self.__graph_dict.keys() and vertex2 in self.__graph_dict.keys():\r\n self.__graph_dict[vertex1] = {vertex2}\r\n self.__graph_dict[vertex2].add(vertex1)\r\n elif vertex1 in self.__graph_dict.keys() and vertex2 not in self.__graph_dict.keys():\r\n self.__graph_dict[vertex2] = {vertex1}\r\n self.__graph_dict[vertex1].add(vertex2)\r\n else:\r\n self.__graph_dict[vertex1] = {vertex2}\r\n self.__graph_dict[vertex2] = {vertex1}",
"def add_edge(self, edge):\n src = edge.get_source()\n dest = edge.get_destination()\n #weightEdge = WeightedEdge(src, dest, edge.get_total_distance(), edge.get_outdoor_distance())\n if not (src in self.edges and dest in self.edges):\n raise ValueError('Node not in graph')\n self.edges[src].append(dest)\n #self.edges[src].append(weightEdge)",
"def addEdge(self, e):\n v = e.either()\n w = e.other(v)\n self._validateVertex(v)\n self._validateVertex(w)\n self._adj[v].add(e)\n self._adj[w].add(e)\n self._E += 1",
"def addEdge(self, edge):\n src = edge.getSource()\n dest = edge.getDestination()\n if not (src in self.edges and dest in self.edges):\n raise ValueError('Node not in graph')\n self.edges[src].append(dest)",
"def add_edge(self, ed):\n self.edge.append(ed)\n self.update_node2edge()",
"def add_edge(self, e):\n a, b = e\n self[a][b] = e\n self[b][a] = e",
"def addEdge(self, edge: Edge):\n self.edges.append(edge)",
"def add_edge(self, edge_name, edge, overwrite=False):\n if (edge_name in self.edges) and (not overwrite):\n raise ValueError(\"Attempted to overwrite the edge \" + edge_name + \".\")\n else:\n self.edges[edge_name] = edge\n self.build_order += [edge_name]",
"def add_neighbor(self, edge: \"Edge\") -> None:\r\n if edge is None or (edge.source != self and edge.target != self):\r\n return\r\n\r\n if edge.source == self:\r\n other: Node = edge.target\r\n elif edge.target == self:\r\n other: Node = edge.source\r\n else:\r\n raise ValueError(\"Tried to add a neighbor with an invalid edge.\")\r\n\r\n edge_key: Tuple(int, int) = edge.key\r\n\r\n # The graph is considered undirected, check neighbor existence accordingly.\r\n if self._neighbors.get(edge_key) or self._neighbors.get((edge_key[1], edge_key[0])):\r\n return # The neighbor is already added.\r\n\r\n self._neighbors[edge_key] = edge\r\n self.dispatch_event(NeighborAddedEvent(other))",
"def add_edge(self, e):\n x = min(e)\n y = max(e)\n if x not in self._vertices:\n self.add_vertex(x)\n if y not in self._vertices:\n self.add_vertex(y)\n self._edges.add( (x, y) )",
"def add_edge(self, edge=None):\n src_key, dest_key = (edge['src_key'], edge['dest_key'])\n if dest_key is self.ROOT_TASK_KEY:\n raise Exception(\"Root task can not be an edge dest\")\n edge_key = (src_key, dest_key)\n self._edges[edge_key] = edge\n self._edges_by_key[src_key]['outgoing'][edge_key] = edge\n self._edges_by_key[dest_key]['incoming'][edge_key] = edge",
"def add_edge(self, edge):\n edge = set(edge)\n (vertex1, vertex2) = tuple(edge)\n if vertex1 not in self.__graph_dict:\n self.__graph_dict[vertex1] = []\n dbg_str = \"Vertex being initialized ..\" + str(vertex1)\n # logging.debug(dbg_str)\n if vertex2 not in self.__graph_dict:\n self.__graph_dict[vertex2] = []\n dbg_str = \"Vertex being initialized ..\" + str(vertex2)\n # logging.debug(dbg_str)\n if vertex2 not in self.__graph_dict[vertex1]:\n self.__graph_dict[vertex1].append(vertex2)\n dbg_str = \"Appending .. \" + str(vertex2), \"to ->\" +str(vertex1)\n # logging.debug(dbg_str)\n\n if vertex1 not in self.__graph_dict[vertex2]:\n self.__graph_dict[vertex2].append(vertex1)\n dbg_str = \"Appending .. \" + str(vertex1), \"to ->\" +str(vertex2)\n # logging.debug(dbg_str)",
"def add_edge(self, e):\n v, w = e\n self[v][w] = e\n self[w][v] = e",
"def addEdge(self, edge):\n\n startVertex = edge.startVertex\n endVertex = edge.endVertex\n\n startVertexNumber = startVertex.vertexNumber\n endVertexNumber = endVertex.vertexNumber\n \n vertexIndex = self.vertexIndex\n parentIndex = self.parentIndex\n parentEdgeIndex = self.parentEdgeIndex\n\n if startVertexNumber == endVertexNumber:\n raise EdgeError(startVertexNumber, endVertexNumber, ErrorMessages.noSelfLoops)\n\n try:\n parentIndex[startVertexNumber].index(endVertexNumber)\n raise EdgeError(startVertexNumber, endVertexNumber, ErrorMessages.edgeAlreadyExists)\n except (ValueError, KeyError):\n self.__lastEdgeNumber += 1\n self.edgeIndex[self.__lastEdgeNumber] = edge\n \n if startVertexNumber not in vertexIndex:\n vertexIndex[startVertexNumber] = startVertex\n\n if endVertexNumber not in vertexIndex:\n vertexIndex[endVertexNumber] = endVertex\n\n if startVertexNumber not in parentIndex:\n parentIndex[startVertexNumber] = [endVertexNumber]\n else:\n parentIndex[startVertexNumber].append(endVertexNumber)\n\n if endVertexNumber not in parentIndex:\n parentIndex[endVertexNumber] = [startVertexNumber]\n else:\n parentIndex[endVertexNumber].append(startVertexNumber)\n\n if startVertexNumber not in parentEdgeIndex:\n parentEdgeIndex[startVertexNumber] = [[endVertexNumber, self.__lastEdgeNumber]]\n else:\n parentEdgeIndex[startVertexNumber].append([endVertexNumber, self.__lastEdgeNumber])\n\n if endVertexNumber not in parentEdgeIndex:\n parentEdgeIndex[endVertexNumber] = [[startVertexNumber, self.__lastEdgeNumber]]\n else:\n parentEdgeIndex[endVertexNumber].append([startVertexNumber, self.__lastEdgeNumber])\n \n try:\n self.__degreeCount[startVertexNumber] += 1\n except KeyError:\n self.__degreeCount[startVertexNumber] = 1\n \n try:\n self.__degreeCount[endVertexNumber] += 1\n except KeyError:\n self.__degreeCount[endVertexNumber] = 1",
"def _add_edge(self, graph: Graph, vertex1: Vertex, vertex2: Vertex) \\\n -> None:\n new_edge = Edge(vertex1, vertex2)\n graph.add(new_edge)",
"def addEdge(self, startNode, endNode):\n if self.directedOrUndirected == 'undirected':\n # no need to check if edge already exists because we're using\n # defaultdict\n\n self.graph[startNode].append(endNode)\n self.graph[endNode].append(startNode)\n else:\n self.graph[startNode].append(endNode)",
"def add_edge(self, current_id=None, in_vertex_id=None, out_vertex_id=None, label=None, properties=None):\n if current_id is None:\n done = False\n while not done:\n next_id = self.get_next_id()\n\n if next_id not in self.edges:\n current_id = next_id\n done = True\n else:\n if current_id in self.edges:\n raise Exception('Edge with ID Already Exist')\n\n in_vertex = self.vertices.get(in_vertex_id)\n out_vertex = self.vertices.get(out_vertex_id)\n\n if out_vertex is None or in_vertex is None:\n raise Exception('In_vertex or out_vertex not found')\n\n current_edge = Edge(self, current_id,\n label=label,\n in_vertex=in_vertex,\n out_vertex=out_vertex,\n properties=properties)\n\n self.edges[current_id] = current_edge\n in_vertex.add_out_edge(label, current_edge)\n out_vertex.add_in_edge(label, current_edge)\n return current_edge"
] | [
"0.7570172",
"0.7543887",
"0.7385329",
"0.731582",
"0.7228451",
"0.72140056",
"0.7205442",
"0.7191608",
"0.7144214",
"0.712575",
"0.7108078",
"0.7094593",
"0.7083264",
"0.6992474",
"0.6966737",
"0.6870384",
"0.6825222",
"0.6818147",
"0.67740756",
"0.67177004",
"0.6700951",
"0.6675162",
"0.66404897",
"0.66286594",
"0.6616903",
"0.65793836",
"0.6552262",
"0.64829767",
"0.64267427",
"0.64159346"
] | 0.81017184 | 0 |